diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 92b75accdcdb5..dd51082981ffc 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -37,3 +37,4 @@ BWC_VERSION: - "1.2.4" - "1.2.5" - "1.3.0" + - "1.4.0" \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000..0db72d2a19cd1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,2 @@ +# disable blank issue creation +blank_issues_enabled: false diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7e44ad238d131..ca972d1b242e3 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,881 +1,871 @@ updates: - directory: / - open-pull-requests-limit: 10 + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /benchmarks/build.gradle - open-pull-requests-limit: 10 + - directory: /benchmarks/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/reaper/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/reaper/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/ + 
open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/darwin-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/darwin-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/oss-darwin-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/oss-darwin-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/minor/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/minor/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/opensearch-build-resources/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/opensearch-build-resources/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/opensearch.build/build.gradle - open-pull-requests-limit: 10 + - directory: 
/buildSrc/src/testKit/opensearch.build/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/reaper/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/reaper/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/symbolic-link-preserving-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/symbolic-link-preserving-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/testingConventions/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/testingConventions/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/thirdPartyAudit/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/thirdPartyAudit/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle - open-pull-requests-limit: 10 + - directory: /buildSrc/src/testKit/thirdPartyAudit/sample_jars/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/benchmark/build.gradle - open-pull-requests-limit: 10 + - directory: /client/benchmark/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/client-benchmark-noop-api-plugin/build.gradle - open-pull-requests-limit: 10 + - directory: /client/client-benchmark-noop-api-plugin/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/rest/build.gradle - open-pull-requests-limit: 10 + - directory: /client/rest/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: 
/client/rest-high-level/build.gradle - open-pull-requests-limit: 10 + - directory: /client/rest-high-level/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/sniffer/build.gradle - open-pull-requests-limit: 10 + - directory: /client/sniffer/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/test/build.gradle - open-pull-requests-limit: 10 + - directory: /client/test/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /client/transport/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/darwin-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/darwin-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/integ-test-zip/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/integ-test-zip/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/linux-arm64-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/linux-arm64-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/linux-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/linux-tar/build.gradle - open-pull-requests-limit: 10 + - directory: 
/distribution/archives/no-jdk-darwin-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/no-jdk-darwin-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/no-jdk-linux-tar/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/no-jdk-linux-tar/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/no-jdk-windows-zip/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/no-jdk-windows-zip/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/archives/windows-zip/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/archives/windows-zip/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/bwc/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/bwc/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/bwc/bugfix/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/bwc/bugfix/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/bwc/maintenance/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/bwc/maintenance/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/bwc/minor/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/bwc/minor/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/bwc/staged/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/bwc/staged/build.gradle - open-pull-requests-limit: 10 + - directory: 
/distribution/docker/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/docker/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/docker/docker-arm64-export/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/docker/docker-arm64-export/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/docker/docker-build-context/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/docker/docker-build-context/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/docker/docker-export/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/docker/docker-export/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/arm64-deb/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/arm64-deb/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/arm64-rpm/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/arm64-rpm/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/deb/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/deb/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/no-jdk-deb/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/no-jdk-deb/build.gradle - open-pull-requests-limit: 10 + - directory: 
/distribution/packages/no-jdk-rpm/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/no-jdk-rpm/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/packages/rpm/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/packages/rpm/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/tools/java-version-checker/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/tools/java-version-checker/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/tools/keystore-cli/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/tools/keystore-cli/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/tools/launchers/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/tools/launchers/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/tools/plugin-cli/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/tools/plugin-cli/build.gradle - open-pull-requests-limit: 10 + - directory: /distribution/tools/upgrade-cli/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /distribution/tools/upgrade-cli/build.gradle - open-pull-requests-limit: 10 + - directory: /doc-tools/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /doc-tools/build.gradle - open-pull-requests-limit: 10 + - directory: /doc-tools/missing-doclet/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /doc-tools/missing-doclet/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/ + open-pull-requests-limit: 1 
package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/cli/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/cli/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/core/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/core/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/dissect/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/dissect/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/geo/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/geo/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/grok/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/grok/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/nio/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/nio/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/plugin-classloader/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/plugin-classloader/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/secure-sm/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/secure-sm/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/ssl-config/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/ssl-config/build.gradle - open-pull-requests-limit: 10 + - directory: /libs/x-content/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /libs/x-content/build.gradle - open-pull-requests-limit: 10 + - directory: 
/modules/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/aggs-matrix-stats/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/aggs-matrix-stats/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/analysis-common/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/analysis-common/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/geo/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/geo/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/ingest-common/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/ingest-common/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/ingest-geoip/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/ingest-geoip/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/ingest-user-agent/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/ingest-user-agent/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/lang-expression/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/lang-expression/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/lang-mustache/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/lang-mustache/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/lang-painless/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/lang-painless/build.gradle - open-pull-requests-limit: 10 
+ - directory: /modules/lang-painless/spi/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/lang-painless/spi/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/mapper-extras/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/mapper-extras/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/opensearch-dashboards/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/opensearch-dashboards/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/parent-join/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/parent-join/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/percolator/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/percolator/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/rank-eval/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/rank-eval/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/reindex/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/reindex/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/repository-url/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/repository-url/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/systemd/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/systemd/build.gradle - open-pull-requests-limit: 10 + - directory: /modules/transport-netty4/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /modules/transport-netty4/build.gradle 
- open-pull-requests-limit: 10 + - directory: /plugins/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-icu/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-icu/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-kuromoji/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-kuromoji/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-nori/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-nori/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-phonetic/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-phonetic/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-smartcn/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-smartcn/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-stempel/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-stempel/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/analysis-ukrainian/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/analysis-ukrainian/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-azure-classic/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-azure-classic/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-ec2/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: 
interval: weekly - - directory: /plugins/discovery-ec2/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-ec2/qa/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-ec2/qa/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-ec2/qa/amazon-ec2/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-ec2/qa/amazon-ec2/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-gce/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-gce/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-gce/qa/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-gce/qa/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/discovery-gce/qa/gce/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/discovery-gce/qa/gce/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/custom-settings/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/custom-settings/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/custom-significance-heuristic/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/custom-significance-heuristic/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/custom-suggester/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: 
/plugins/examples/custom-suggester/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/painless-whitelist/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/painless-whitelist/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/rescore/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/rescore/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/rest-handler/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/rest-handler/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/examples/script-expert-scoring/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/examples/script-expert-scoring/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/ingest-attachment/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/ingest-attachment/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/mapper-annotated-text/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/mapper-annotated-text/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/mapper-murmur3/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/mapper-murmur3/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/mapper-size/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/mapper-size/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/repository-azure/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/repository-azure/build.gradle - 
open-pull-requests-limit: 10 + - directory: /plugins/repository-gcs/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/repository-gcs/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/repository-hdfs/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/repository-hdfs/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/repository-s3/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/repository-s3/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/store-smb/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/store-smb/build.gradle - open-pull-requests-limit: 10 + - directory: /plugins/transport-nio/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /plugins/transport-nio/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/ccs-unavailable-clusters/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/ccs-unavailable-clusters/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/die-with-dignity/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/die-with-dignity/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/evil-tests/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/evil-tests/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/full-cluster-restart/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/full-cluster-restart/build.gradle - 
open-pull-requests-limit: 10 + - directory: /qa/logging-config/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/logging-config/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/mixed-cluster/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/mixed-cluster/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/multi-cluster-search/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/multi-cluster-search/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/no-bootstrap-tests/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/no-bootstrap-tests/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/centos-6/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/centos-6/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/centos-7/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/centos-7/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/debian-8/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/debian-8/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/debian-9/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/debian-9/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/fedora-28/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/fedora-28/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/fedora-29/ + 
open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/fedora-29/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/oel-6/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/oel-6/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/oel-7/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/oel-7/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/sles-12/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/sles-12/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/ubuntu-1604/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/ubuntu-1604/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/ubuntu-1804/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/ubuntu-1804/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/windows-2012r2/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/windows-2012r2/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/os/windows-2016/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/os/windows-2016/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/remote-clusters/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/remote-clusters/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/repository-multi-version/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/repository-multi-version/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/rolling-upgrade/ + open-pull-requests-limit: 1 
package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/rolling-upgrade/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/smoke-test-http/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-client/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/smoke-test-ingest-disabled/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-http/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/smoke-test-ingest-with-all-dependencies/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-ingest-disabled/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/smoke-test-multinode/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-ingest-with-all-dependencies/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/smoke-test-plugins/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-multinode/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/translog-policy/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/smoke-test-plugins/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/unconfigured-node-name/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/translog-policy/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/verify-version-constants/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/unconfigured-node-name/build.gradle - open-pull-requests-limit: 10 + - directory: /qa/wildfly/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/verify-version-constants/build.gradle - 
open-pull-requests-limit: 10 + - directory: /rest-api-spec/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /qa/wildfly/build.gradle - open-pull-requests-limit: 10 + - directory: /sandbox/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /rest-api-spec/build.gradle - open-pull-requests-limit: 10 + - directory: /sandbox/libs/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /sandbox/build.gradle - open-pull-requests-limit: 10 + - directory: /sandbox/modules/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /sandbox/libs/build.gradle - open-pull-requests-limit: 10 + - directory: /sandbox/plugins/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /sandbox/modules/build.gradle - open-pull-requests-limit: 10 + - directory: /server/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /sandbox/plugins/build.gradle - open-pull-requests-limit: 10 + - directory: /test/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /server/build.gradle - open-pull-requests-limit: 10 + - directory: /test/external-modules/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/build.gradle - open-pull-requests-limit: 10 + - directory: /test/external-modules/delayed-aggs/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/external-modules/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/external-modules/delayed-aggs/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/azure-fixture/ + open-pull-requests-limit: 
1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/gcs-fixture/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/azure-fixture/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/hdfs-fixture/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/gcs-fixture/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/krb5kdc-fixture/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/hdfs-fixture/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/minio-fixture/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/krb5kdc-fixture/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/old-elasticsearch/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/minio-fixture/build.gradle - open-pull-requests-limit: 10 + - directory: /test/fixtures/s3-fixture/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/old-elasticsearch/build.gradle - open-pull-requests-limit: 10 + - directory: /test/framework/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: interval: weekly - - directory: /test/fixtures/s3-fixture/build.gradle - open-pull-requests-limit: 10 - package-ecosystem: gradle - schedule: - interval: weekly - - directory: /test/framework/build.gradle - open-pull-requests-limit: 10 - package-ecosystem: gradle - schedule: - interval: weekly - - directory: /test/logger-usage/build.gradle - open-pull-requests-limit: 10 + - directory: /test/logger-usage/ + open-pull-requests-limit: 1 package-ecosystem: gradle schedule: 
interval: weekly diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml deleted file mode 100644 index cf30ea89dcbcb..0000000000000 --- a/.github/workflows/dco.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Developer Certificate of Origin Check - -on: [pull_request] - -jobs: - check: - runs-on: ubuntu-latest - - steps: - - name: Get PR Commits - id: 'get-pr-commits' - uses: tim-actions/get-pr-commits@v1.1.0 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: DCO Check - uses: tim-actions/dco@v1.1.0 - with: - commits: ${{ steps.get-pr-commits.outputs.commits }} diff --git a/.github/workflows/dependabot_pr.yml b/.github/workflows/dependabot_pr.yml new file mode 100644 index 0000000000000..2ac904bf4ccf7 --- /dev/null +++ b/.github/workflows/dependabot_pr.yml @@ -0,0 +1,49 @@ +name: Dependabot PR actions +on: pull_request + +jobs: + dependabot: + runs-on: ubuntu-latest + permissions: + pull-requests: write + contents: write + if: ${{ github.actor == 'dependabot[bot]' }} + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + + - name: Check out code + uses: actions/checkout@v2 + with: + token: ${{ steps.github_app_token.outputs.token }} + + - name: Update Gradle SHAs + run: | + ./gradlew updateSHAs + + - name: Commit the changes + uses: stefanzweifel/git-auto-commit-action@v4.7.2 + with: + commit_message: Updating SHAs + branch: ${{ github.head_ref }} + commit_user_name: dependabot[bot] + commit_user_email: support@github.com + commit_options: '--signoff' + + - name: Run spotless + run: | + ./gradlew spotlessApply + + - name: Commit the changes + uses: stefanzweifel/git-auto-commit-action@v4.7.2 + with: + commit_message: Spotless formatting + branch: ${{ github.head_ref }} + commit_user_name: dependabot[bot] + commit_user_email: support@github.com + commit_options: '--signoff' diff --git 
a/.github/workflows/version.yml b/.github/workflows/version.yml index 2d3bc512dc646..b42e7c4f2f317 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -8,9 +8,15 @@ on: jobs: build: runs-on: ubuntu-latest - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + - uses: actions/checkout@v2 - name: Fetch Tag and Version Information run: | @@ -35,6 +41,8 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ env.BASE }} + token: ${{ steps.github_app_token.outputs.token }} + - name: Increment Patch Version run: | echo Incrementing $CURRENT_VERSION to $NEXT_VERSION @@ -43,9 +51,11 @@ jobs: echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" server/src/main/java/org/opensearch/Version.java sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" server/src/main/java/org/opensearch/Version.java + - name: Create Pull Request uses: peter-evans/create-pull-request@v3 with: + token: ${{ steps.github_app_token.outputs.token }} base: ${{ env.BASE }} branch: 'create-pull-request/patch-${{ env.BASE }}' commit-message: Incremented version to ${{ env.NEXT_VERSION }} @@ -57,15 +67,19 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ env.BASE_X }} + token: ${{ steps.github_app_token.outputs.token }} + - name: Add bwc version to .X branch run: | echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE sed -i "s/public static final Version 
$CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" server/src/main/java/org/opensearch/Version.java + - name: Create Pull Request uses: peter-evans/create-pull-request@v3 with: + token: ${{ steps.github_app_token.outputs.token }} base: ${{ env.BASE_X }} branch: 'create-pull-request/patch-${{ env.BASE_X }}' commit-message: Added bwc version ${{ env.NEXT_VERSION }} @@ -77,15 +91,19 @@ jobs: - uses: actions/checkout@v2 with: ref: main + token: ${{ steps.github_app_token.outputs.token }} + - name: Add bwc version to main branch run: | echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" server/src/main/java/org/opensearch/Version.java + - name: Create Pull Request uses: peter-evans/create-pull-request@v3 with: + token: ${{ steps.github_app_token.outputs.token }} base: main branch: 'create-pull-request/patch-main' commit-message: Added bwc version ${{ env.NEXT_VERSION }} diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 6e3886a04e6da..58444441e3258 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -61,7 +61,19 @@ Fork [opensearch-project/OpenSearch](https://github.com/opensearch-project/OpenS #### JDK 11 -OpenSearch builds using Java 11 at a minimum. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. +OpenSearch builds using Java 11 at a minimum, using the Adoptium distribution. 
This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. This is configured in [buildSrc/build.gradle](buildSrc/build.gradle) and [distribution/tools/java-version-checker/build.gradle](distribution/tools/java-version-checker/build.gradle). + +``` +allprojects { + targetCompatibility = JavaVersion.VERSION_11 + sourceCompatibility = JavaVersion.VERSION_11 +} +``` + +``` +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 +``` Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk11). @@ -69,9 +81,18 @@ Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk1 To run the full suite of tests, download and install [JDK 14](https://jdk.java.net/archive/) and set `JAVA11_HOME`, and `JAVA14_HOME`. They are required by the [backwards compatibility test](./TESTING.md#testing-backwards-compatibility). -#### Runtime JDK +#### JDK 17 + +By default, the test tasks use bundled JDK runtime, configured in [buildSrc/version.properties](buildSrc/version.properties), and set to JDK 17 (LTS). + +``` +bundled_jdk_vendor = adoptium +bundled_jdk = 17.0.2+8 +``` + +#### Custom Runtime JDK -By default, the test tasks use bundled JDK runtime, configured in `buildSrc/version.properties` and set to JDK 17 (LTS). Other kind of test tasks (integration, cluster, ... ) use the same runtime as `JAVA_HOME`. However, the build supports compiling with JDK 11 and testing on a different version of JDK runtime. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. 
Alternatively, the runtime JDK version could be provided as the command line argument, using combination of `runtime.java=` property and `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects `JAVA17_HOME` environment variable to be set). +Other kind of test tasks (integration, cluster, etc.) use the same runtime as `JAVA_HOME`. However, the build also supports compiling with one version of JDK, and testing on a different version. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. Alternatively, the runtime JDK version could be provided as the command line argument, using combination of `runtime.java=` property and `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects `JAVA17_HOME` environment variable to be set). #### Windows diff --git a/README.md b/README.md index e4bdb4d85b632..ec9cae6e432b2 100644 --- a/README.md +++ b/README.md @@ -39,3 +39,9 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. + +## Trademark + +OpenSearch is a registered trademark of Amazon Web Services. + +OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. \ No newline at end of file diff --git a/TESTING.md b/TESTING.md index 5571b7c7a4aaf..4a2a786469b67 100644 --- a/TESTING.md +++ b/TESTING.md @@ -245,7 +245,7 @@ The YAML REST tests support all the options provided by the randomized runner, p - `tests.rest.suite`: comma separated paths of the test suites to be run (by default loaded from /rest-api-spec/test). 
It is possible to run only a subset of the tests providing a sub-folder or even a single yaml file (the default /rest-api-spec/test prefix is optional when files are loaded from classpath) e.g. `-Dtests.rest.suite=index,get,create/10_with_id` -- `tests.rest.blacklist`: comma separated globs that identify tests that are blacklisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**` +- `tests.rest.blacklist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**` Java REST tests can be run with the "javaRestTest" task. diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 34dc7a5691e0b..faaeb33d80ff7 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -48,8 +48,8 @@ dependencies { api "org.openjdk.jmh:jmh-core:$versions.jmh" annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh" // Dependencies of JMH - runtimeOnly 'net.sf.jopt-simple:jopt-simple:4.6' - runtimeOnly 'org.apache.commons:commons-math3:3.2' + runtimeOnly 'net.sf.jopt-simple:jopt-simple:5.0.4' + runtimeOnly 'org.apache.commons:commons-math3:3.6.1' } // enable the JMH's BenchmarkProcessor to generate the final benchmark classes diff --git a/build.gradle b/build.gradle index 91472a62a2bd9..374bfb3ccfae3 100644 --- a/build.gradle +++ b/build.gradle @@ -48,7 +48,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "5.6.1" apply false + id "com.diffplug.spotless" version "6.3.0" apply false } apply from: 'gradle/build-complete.gradle' @@ -244,7 +244,7 @@ allprojects { compile.options.compilerArgs << '-Xlint:opens' compile.options.compilerArgs << '-Xlint:overloads' compile.options.compilerArgs << '-Xlint:overrides' - compile.options.compilerArgs << '-Xlint:processing' + compile.options.compilerArgs << '-Xlint:-processing' 
compile.options.compilerArgs << '-Xlint:rawtypes' compile.options.compilerArgs << '-Xlint:removal' compile.options.compilerArgs << '-Xlint:requires-automatic' diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 6b88134c9aaa6..ff79cc5df0df0 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -102,17 +102,17 @@ dependencies { api localGroovy() - api 'commons-codec:commons-codec:1.13' + api 'commons-codec:commons-codec:1.15' api 'org.apache.commons:commons-compress:1.21' api 'org.apache.ant:ant:1.10.12' - api 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0' api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' api 'com.netflix.nebula:gradle-info-plugin:7.1.3' api 'org.apache.rat:apache-rat:0.13' api 'commons-io:commons-io:2.7' api "net.java.dev.jna:jna:5.5.0" api 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2' - api 'de.thetaphi:forbiddenapis:3.0' + api 'de.thetaphi:forbiddenapis:3.2' api 'com.avast.gradle:gradle-docker-compose-plugin:0.14.12' api 'org.apache.maven:maven-model:3.6.2' api 'com.networknt:json-schema-validator:1.0.36' @@ -124,7 +124,7 @@ dependencies { testFixturesApi gradleTestKit() testImplementation 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2' testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}" - integTestImplementation('org.spockframework:spock-core:2.0-groovy-3.0') { + integTestImplementation('org.spockframework:spock-core:2.1-groovy-3.0') { exclude module: "groovy" } } @@ -158,8 +158,8 @@ if (project != rootProject) { apply plugin: 'opensearch.publish' allprojects { - targetCompatibility = 11 - sourceCompatibility = 11 + targetCompatibility = JavaVersion.VERSION_11 + sourceCompatibility = JavaVersion.VERSION_11 } // groovydoc succeeds, but has some weird internal exception... 
diff --git a/buildSrc/src/integTest/java/org/opensearch/gradle/precommit/ThirdPartyAuditTaskIT.java b/buildSrc/src/integTest/java/org/opensearch/gradle/precommit/ThirdPartyAuditTaskIT.java index c658f8d18be70..4e5bbfd409f90 100644 --- a/buildSrc/src/integTest/java/org/opensearch/gradle/precommit/ThirdPartyAuditTaskIT.java +++ b/buildSrc/src/integTest/java/org/opensearch/gradle/precommit/ThirdPartyAuditTaskIT.java @@ -71,7 +71,7 @@ public void testWithEmptyRules() { "-PcompileOnlyVersion=0.0.1", "-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.1" - ).buildAndFail(); + ).build(); } public void testViolationFoundAndCompileOnlyIgnored() { diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java b/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java index 2c3c8bf2629bc..63ad25a977b68 100644 --- a/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java +++ b/buildSrc/src/main/groovy/org/opensearch/gradle/ResolveAllDependencies.java @@ -55,7 +55,7 @@ static boolean canBeResolved(Configuration configuration) { return false; } if (configuration instanceof org.gradle.internal.deprecation.DeprecatableConfiguration) { - var deprecatableConfiguration = (DeprecatableConfiguration) configuration; + DeprecatableConfiguration deprecatableConfiguration = (DeprecatableConfiguration) configuration; if (deprecatableConfiguration.canSafelyBeResolved() == false) { return false; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java index 86774265c81ad..843a7f7d2716d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java @@ -203,7 +203,6 @@ private static void setupDownloadServiceRepo(Project project) { "/releases" + RELEASE_PATTERN_LAYOUT, "/release-candidates" + 
RELEASE_PATTERN_LAYOUT ); - addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java b/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java index 2831108b94452..0512ed72f5e47 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/LoggedExec.java @@ -64,7 +64,6 @@ /** * A wrapper around gradle's Exec task to capture output and log on error. */ -@SuppressWarnings("unchecked") public class LoggedExec extends Exec implements FileSystemOperationsAware { private static final Logger LOGGER = Logging.getLogger(LoggedExec.class); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java index 80850e05b8a02..c701c47f9e68c 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchJavaPlugin.java @@ -63,8 +63,8 @@ import org.gradle.language.base.plugins.LifecycleBasePlugin; import java.io.File; +import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.function.Consumer; import static org.opensearch.gradle.util.Util.toStringable; @@ -173,17 +173,16 @@ public static void configureCompile(Project project) { // workaround for https://github.com/gradle/gradle/issues/14141 compileTask.getConventionMapping().map("sourceCompatibility", () -> java.getSourceCompatibility().toString()); compileTask.getConventionMapping().map("targetCompatibility", () -> java.getTargetCompatibility().toString()); - compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + // The '--release is available from JDK-9 and above + if 
(BuildParams.getRuntimeJavaVersion().compareTo(JavaVersion.VERSION_1_8) > 0) { + compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + } }); // also apply release flag to groovy, which is used in build-tools - project.getTasks() - .withType(GroovyCompile.class) - .configureEach( - compileTask -> { - // TODO: this probably shouldn't apply to groovy at all? - compileTask.getOptions().getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); - } - ); + project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> { + // TODO: this probably shouldn't apply to groovy at all? + compileTask.getOptions().getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + }); }); } @@ -205,50 +204,39 @@ public static void configureInputNormalization(Project project) { * Adds additional manifest info to jars */ static void configureJars(Project project) { - project.getTasks() - .withType(Jar.class) - .configureEach( - jarTask -> { - // we put all our distributable files under distributions - jarTask.getDestinationDirectory().set(new File(project.getBuildDir(), "distributions")); - // fixup the jar manifest - // Explicitly using an Action interface as java lambdas - // are not supported by Gradle up-to-date checks - jarTask.doFirst(new Action() { - @Override - public void execute(Task task) { - // this doFirst is added before the info plugin, therefore it will run - // after the doFirst added by the info plugin, and we can override attributes - jarTask.getManifest() - .attributes( - Map.of( - "Build-Date", - BuildParams.getBuildDate(), - "Build-Java-Version", - BuildParams.getGradleJavaVersion() - ) - ); + project.getTasks().withType(Jar.class).configureEach(jarTask -> { + // we put all our distributable files under distributions + jarTask.getDestinationDirectory().set(new File(project.getBuildDir(), "distributions")); + // fixup the jar manifest + // Explicitly using an Action 
interface as java lambdas + // are not supported by Gradle up-to-date checks + jarTask.doFirst(new Action() { + @Override + public void execute(Task task) { + // this doFirst is added before the info plugin, therefore it will run + // after the doFirst added by the info plugin, and we can override attributes + jarTask.getManifest().attributes(new HashMap() { + { + put("Build-Date", BuildParams.getBuildDate()); + put("Build-Java-Version", BuildParams.getGradleJavaVersion()); } }); } - ); + }); + }); project.getPluginManager().withPlugin("com.github.johnrengelman.shadow", p -> { - project.getTasks() - .withType(ShadowJar.class) - .configureEach( - shadowJar -> { - /* - * Replace the default "-all" classifier with null - * which will leave the classifier off of the file name. - */ - shadowJar.getArchiveClassifier().set((String) null); - /* - * Not all cases need service files merged but it is - * better to be safe - */ - shadowJar.mergeServiceFiles(); - } - ); + project.getTasks().withType(ShadowJar.class).configureEach(shadowJar -> { + /* + * Replace the default "-all" classifier with null + * which will leave the classifier off of the file name. + */ + shadowJar.getArchiveClassifier().set((String) null); + /* + * Not all cases need service files merged but it is + * better to be safe + */ + shadowJar.mergeServiceFiles(); + }); // Add "original" classifier to the non-shadowed JAR to distinguish it from the shadow JAR project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class).configure(jar -> jar.getArchiveClassifier().set("original")); // Make sure we assemble the shadow jar @@ -282,7 +270,9 @@ private static void configureJavadoc(Project project) { * that the default will change to html5 in the future. 
*/ CoreJavadocOptions javadocOptions = (CoreJavadocOptions) javadoc.getOptions(); - javadocOptions.addBooleanOption("html5", true); + if (BuildParams.getRuntimeJavaVersion().compareTo(JavaVersion.VERSION_1_8) > 0) { + javadocOptions.addBooleanOption("html5", true); + } }); TaskProvider javadoc = project.getTasks().withType(Javadoc.class).named("javadoc"); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java index 8a972bfa37e78..9d6e78014916d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java @@ -49,6 +49,7 @@ import org.gradle.api.tasks.testing.Test; import java.io.File; +import java.util.HashMap; import java.util.Map; import static org.opensearch.gradle.util.FileUtils.mkdirs; @@ -95,7 +96,7 @@ public void apply(Project project) { // We specifically use an anonymous inner class here because lambda task actions break Gradle cacheability // See: https://docs.gradle.org/current/userguide/more_about_tasks.html#sec:how_does_it_work - test.doFirst(new Action<>() { + test.doFirst(new Action() { @Override public void execute(Task t) { mkdirs(testOutputDir); @@ -137,20 +138,16 @@ public void execute(Task t) { test.jvmArgs("-ea", "-esa"); } - Map sysprops = Map.of( - "java.awt.headless", - "true", - "tests.gradle", - "true", - "tests.artifact", - project.getName(), - "tests.task", - test.getPath(), - "tests.security.manager", - "true", - "jna.nosys", - "true" - ); + Map sysprops = new HashMap() { + { + put("java.awt.headless", "true"); + put("tests.gradle", "true"); + put("tests.artifact", project.getName()); + put("tests.task", test.getPath()); + put("tests.security.manager", "true"); + put("jna.nosys", "true"); + } + }; test.systemProperties(sysprops); // ignore changing test seed when build is passed -Dignore.tests.seed for cacheability 
experimentation diff --git a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java index d5143f43ab70e..16e5cba4b5b23 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java @@ -56,6 +56,7 @@ public void apply(Project project) { .resolve(".gradle") .resolve("reaper") .resolve("build-" + ProcessHandle.current().pid()); + ReaperService service = project.getExtensions() .create("reaper", ReaperService.class, project, project.getBuildDir().toPath(), inputDir); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java index 2cb977117858b..2eb2852e3e55e 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/docker/DockerSupportService.java @@ -49,6 +49,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -280,7 +281,7 @@ static Map parseOsRelease(final List osReleaseLines) { */ private Optional getDockerPath() { // Check if the Docker binary exists - return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + return Arrays.asList(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); } /** @@ -291,7 +292,7 @@ private Optional getDockerPath() { */ private Optional getDockerComposePath() { // Check if the Docker binary exists - return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + return Arrays.asList(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); } private void throwDockerRequiredException(final String message) { diff --git 
a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java index 13aa2e43af313..11270e5c9a51d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalBwcGitPlugin.java @@ -109,7 +109,7 @@ public void apply(Project project) { }); TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> { - var gitFetchLatest = project.getProviders() + Provider gitFetchLatest = project.getProviders() .systemProperty("tests.bwc.git_fetch_latest") .forUseAtConfigurationTime() .orElse("true") @@ -122,7 +122,7 @@ public void apply(Project project) { } throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); }); - fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get()); + fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get() != null); fetchLatest.dependsOn(addRemoteTaskProvider); fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get()); fetchLatest.setCommandLine(asList("git", "fetch", "--all")); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index e0acdd11a6f76..8adfbff424278 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -39,6 +39,7 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import 
org.gradle.api.plugins.BasePlugin; import org.gradle.api.logging.Logger; @@ -46,6 +47,7 @@ import org.gradle.api.tasks.AbstractCopyTask; import org.gradle.api.tasks.Sync; import org.gradle.api.tasks.TaskContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.bundling.AbstractArchiveTask; import org.gradle.api.tasks.bundling.Compression; import org.gradle.api.tasks.bundling.Zip; @@ -105,14 +107,18 @@ private Action configure(String name) { private void registerAndConfigureDistributionArchivesExtension(Project project) { container = project.container(DistributionArchive.class, name -> { - var subProjectDir = archiveToSubprojectName(name); - var copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3)); + String subProjectDir = archiveToSubprojectName(name); + String copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3)); TaskContainer tasks = project.getTasks(); - var explodedDist = tasks.register(copyDistributionTaskName, Sync.class, sync -> sync.into(subProjectDir + "/build/install/")); + TaskProvider explodedDist = tasks.register( + copyDistributionTaskName, + Sync.class, + sync -> sync.into(subProjectDir + "/build/install/") + ); explodedDist.configure(configure(name)); - var archiveTaskName = "build" + capitalize(name); + String archiveTaskName = "build" + capitalize(name); - var archiveTask = name.endsWith("Tar") + TaskProvider archiveTask = name.endsWith("Tar") ? tasks.register(archiveTaskName, SymbolicLinkPreservingTar.class) : tasks.register(archiveTaskName, Zip.class); archiveTask.configure(configure(name)); @@ -122,11 +128,11 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) // Each defined distribution archive is linked to a subproject. // A distribution archive definition not matching a sub project will result in build failure. 
container.whenObjectAdded(distributionArchive -> { - var subProjectName = archiveToSubprojectName(distributionArchive.getName()); + String subProjectName = archiveToSubprojectName(distributionArchive.getName()); project.project(subProjectName, sub -> { sub.getPlugins().apply(BasePlugin.class); sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); - var extractedConfiguration = sub.getConfigurations().create("extracted"); + Configuration extractedConfiguration = sub.getConfigurations().create("extracted"); extractedConfiguration.setCanBeResolved(false); extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask()); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java index 684710a4c23ba..328edda8b1787 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ForbiddenApisPrecommitPlugin.java @@ -48,8 +48,9 @@ import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; -import java.util.Set; public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin { @Override @@ -90,14 +91,14 @@ public TaskProvider createTask(Project project) { // TODO: forbidden apis does not yet support java 15, rethink using runtime version t.setTargetCompatibility(JavaVersion.VERSION_14.getMajorVersion()); } - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out")); + t.setBundledSignatures(new HashSet<>(Arrays.asList("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"))); t.setSignaturesFiles( project.files( 
resourcesDir.resolve("forbidden/jdk-signatures.txt"), resourcesDir.resolve("forbidden/opensearch-all-signatures.txt") ) ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + t.setSuppressAnnotations(new HashSet<>(Arrays.asList("**.SuppressForbidden"))); if (t.getName().endsWith("Test")) { t.setSignaturesFiles( t.getSignaturesFiles() diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitPlugin.java index 183148f3e1bef..0b4cc20f145ad 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitPlugin.java @@ -53,16 +53,12 @@ public final void apply(Project project) { TaskProvider precommit = project.getTasks().named(PRECOMMIT_TASK_NAME); precommit.configure(t -> t.dependsOn(task)); - project.getPluginManager() - .withPlugin( - "java", - p -> { - // We want to get any compilation error before running the pre-commit checks. - for (SourceSet sourceSet : GradleUtils.getJavaSourceSets(project)) { - task.configure(t -> t.shouldRunAfter(sourceSet.getClassesTaskName())); - } - } - ); + project.getPluginManager().withPlugin("java", p -> { + // We want to get any compilation error before running the pre-commit checks. 
+ for (SourceSet sourceSet : GradleUtils.getJavaSourceSets(project)) { + task.configure(t -> t.shouldRunAfter(sourceSet.getClassesTaskName())); + } + }); } public abstract TaskProvider createTask(Project project); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitTaskPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitTaskPlugin.java index 43e7d5bf69581..52cbdbded2f13 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitTaskPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/PrecommitTaskPlugin.java @@ -56,18 +56,14 @@ public void apply(Project project) { "lifecycle-base", p -> project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME).configure(t -> t.dependsOn(precommit)) ); - project.getPluginManager() - .withPlugin( - "java", - p -> { - // run compilation as part of precommit - for (SourceSet sourceSet : GradleUtils.getJavaSourceSets(project)) { - precommit.configure(t -> t.dependsOn(sourceSet.getClassesTaskName())); - } + project.getPluginManager().withPlugin("java", p -> { + // run compilation as part of precommit + for (SourceSet sourceSet : GradleUtils.getJavaSourceSets(project)) { + precommit.configure(t -> t.dependsOn(sourceSet.getClassesTaskName())); + } - // make sure tests run after all precommit tasks - project.getTasks().withType(Test.class).configureEach(t -> t.mustRunAfter(precommit)); - } - ); + // make sure tests run after all precommit tasks + project.getTasks().withType(Test.class).configureEach(t -> t.mustRunAfter(precommit)); + }); } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java index 9cf58b25c58d2..5d707ce2b9f28 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java +++ 
b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -51,7 +51,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { public TaskProvider createTask(Project project) { project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); - project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:2.7"); + project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.2"); Configuration jdkJarHellConfig = project.getConfigurations().create(JDK_JAR_HELL_CONFIG_NAME); if (BuildParams.isInternal() && project.getPath().equals(":libs:opensearch-core") == false) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java index 96e98ae9d980c..b3ac804566e29 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ValidateJsonNoKeywordsTask.java @@ -51,6 +51,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -126,14 +127,17 @@ public void validate(InputChanges inputChanges) { final JsonNode jsonNode = mapper.readTree(file); if (jsonNode.isObject() == false) { - errors.put(file, Set.of("Expected an object, but found: " + jsonNode.getNodeType())); + errors.put(file, new HashSet<>(Arrays.asList("Expected an object, but found: " + jsonNode.getNodeType()))); return; } final ObjectNode rootNode = (ObjectNode) jsonNode; if (rootNode.size() != 1) { - errors.put(file, Set.of("Expected an object with exactly 1 key, but found " + rootNode.size() + " keys")); + errors.put( + file, + new HashSet<>(Arrays.asList("Expected an object with exactly 1 
key, but found " + rootNode.size() + " keys")) + ); return; } @@ -148,7 +152,7 @@ public void validate(InputChanges inputChanges) { } } } catch (IOException e) { - errors.put(file, Set.of("Failed to load file: " + e.getMessage())); + errors.put(file, new HashSet<>(Arrays.asList("Failed to load file: " + e.getMessage()))); } }); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index 68fab04c2217f..a77155aacf723 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -70,6 +70,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.function.Supplier; import java.util.stream.Stream; @@ -204,7 +205,7 @@ public void apply(Project project) { vmDependencies ); } else { - for (var entry : linuxTestTasks.entrySet()) { + for (Entry>> entry : linuxTestTasks.entrySet()) { OpenSearchDistribution.Type type = entry.getKey(); TaskProvider vmLifecycleTask = vmLifecyleTasks.get(type); configureVMWrapperTasks(vmProject, entry.getValue(), depsTasks, wrapperTask -> { @@ -227,7 +228,7 @@ public void apply(Project project) { }, vmDependencies); } - for (var entry : upgradeTestTasks.entrySet()) { + for (Entry>> entry : upgradeTestTasks.entrySet()) { String version = entry.getKey(); TaskProvider vmVersionTask = vmVersionTasks.get(version); configureVMWrapperTasks( @@ -321,7 +322,12 @@ private static Object convertPath( private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); DependencyHandler deps = project.getDependencies(); - Map examplePluginProject = Map.of("path", ":example-plugins:custom-settings", "configuration", "zip"); + Map examplePluginProject = new HashMap() { + { + put("path", 
":example-plugins:custom-settings"); + put("configuration", "zip"); + } + }; deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(examplePluginProject)); return examplePlugin; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java index 5e8194556e98f..399cd39d236d7 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestApiTask.java @@ -164,7 +164,7 @@ void copy() { getFileSystemOperations().copy(c -> { c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile())); // this ends up as the same dir as outputDir - c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir())); + c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir())); if (includeCore.get().isEmpty()) { c.include(REST_API_PREFIX + "/**"); } else { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java index 8204aea1ae8ba..56ce449f4cf6f 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/CopyRestTestsTask.java @@ -155,7 +155,7 @@ void copy() { getFileSystemOperations().copy(c -> { c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile())); // this ends up as the same dir as outputDir - c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir())); + c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir())); c.include( includeCore.get().stream().map(prefix -> REST_TEST_PREFIX + "/" + prefix + "*/**").collect(Collectors.toList()) ); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java 
b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java index 1b0b6953d1507..728e36ce98bff 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java @@ -41,7 +41,7 @@ import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; -import java.util.Map; +import java.util.HashMap; /** *

@@ -88,8 +88,12 @@ public void apply(Project project) { task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME; if (BuildParams.isInternal()) { // core - Dependency restTestdependency = project.getDependencies() - .project(Map.of("path", ":rest-api-spec", "configuration", "restTests")); + Dependency restTestdependency = project.getDependencies().project(new HashMap() { + { + put("path", ":rest-api-spec"); + put("configuration", "restTests"); + } + }); project.getDependencies().add(task.coreConfig.getName(), restTestdependency); } else { Dependency dependency = project.getDependencies() @@ -109,8 +113,12 @@ public void apply(Project project) { task.coreConfig = specConfig; task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME; if (BuildParams.isInternal()) { - Dependency restSpecDependency = project.getDependencies() - .project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs")); + Dependency restSpecDependency = project.getDependencies().project(new HashMap() { + { + put("path", ":rest-api-spec"); + put("configuration", "restSpecs"); + } + }); project.getDependencies().add(task.coreConfig.getName(), restSpecDependency); } else { Dependency dependency = project.getDependencies() diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java index 9e6984fd45007..a94ebacd460a5 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java @@ -404,11 +404,6 @@ public void upgradeAllNodesAndPluginsToNextVersion(List> p writeUnicastHostsFiles(); } - public void fullRestart() { - stop(false); - start(); - } - public void nextNodeToNextVersion() { OpenSearchNode node = upgradeNodeToNextVersion(); node.start(); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java 
b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java index a99b118f7176d..b051c15e81d6d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java @@ -385,8 +385,12 @@ Collection getPluginAndModuleConfigurations() { private Provider maybeCreatePluginOrModuleDependency(String path) { Configuration configuration = pluginAndModuleConfigurations.computeIfAbsent( path, - key -> project.getConfigurations() - .detachedConfiguration(project.getDependencies().project(Map.of("path", path, "configuration", "zip"))) + key -> project.getConfigurations().detachedConfiguration(project.getDependencies().project(new HashMap() { + { + put("path", path); + put("configuration", "zip"); + } + })) ); Provider fileProvider = configuration.getElements() .map( @@ -679,10 +683,6 @@ void goToNextVersion() { setting("node.attr.upgraded", "true"); } - private boolean isSettingTrue(String name) { - return Boolean.valueOf(settings.getOrDefault(name, "false").toString()); - } - private void copyExtraConfigFiles() { if (extraConfigFiles.isEmpty() == false) { logToProcessStdout("Setting up " + extraConfigFiles.size() + " additional config files"); @@ -977,7 +977,7 @@ public synchronized void stop(boolean tailLogs) { LOGGER.info("Stopping `{}`, tailLogs: {}", this, tailLogs); requireNonNull(opensearchProcess, "Can't stop `" + this + "` as it was not started or already stopped."); // Test clusters are not reused, don't spend time on a graceful shutdown - stopHandle(opensearchProcess.toHandle(), true); + stopProcess(opensearchProcess.toHandle(), true); reaper.unregister(toString()); if (tailLogs) { logFileContents("Standard output of node", currentConfig.stdoutFile); @@ -1002,7 +1002,7 @@ public void setNameCustomization(Function nameCustomizer) { this.nameCustomization = nameCustomizer; } - private void stopHandle(ProcessHandle processHandle, boolean forcibly) 
{ + private void stopProcess(ProcessHandle processHandle, boolean forcibly) { // No-op if the process has already exited by itself. if (processHandle.isAlive() == false) { LOGGER.info("Process was not running when we tried to terminate it."); @@ -1041,7 +1041,12 @@ private void stopHandle(ProcessHandle processHandle, boolean forcibly) { throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); } } finally { - children.forEach(each -> stopHandle(each, forcibly)); + children.forEach(each -> stopProcess(each, forcibly)); + } + + waitForProcessToExit(processHandle); + if (processHandle.isAlive()) { + throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java index a46e6ca876b35..b27f205291269 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClusterConfiguration.java @@ -172,11 +172,9 @@ default void waitForConditions( } else { String extraCause = ""; Throwable cause = lastException; - int ident = 2; while (cause != null) { if (cause.getMessage() != null && cause.getMessage().isEmpty() == false) { extraCause += "\n" + " " + cause.getMessage(); - ident += 2; } cause = cause.getCause(); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java index 1a55052f53004..ae1db26fbc48d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java @@ -64,9 +64,9 @@ import java.io.IOException; import 
java.io.UncheckedIOException; import java.nio.file.Files; +import java.util.Arrays; import java.util.Collections; import java.util.function.BiConsumer; -import java.util.List; import java.util.Optional; public class TestFixturesPlugin implements Plugin { @@ -162,7 +162,7 @@ public void execute(Task task) { final Integer timeout = ext.has("dockerComposeHttpTimeout") ? (Integer) ext.get("dockerComposeHttpTimeout") : 120; composeExtension.getEnvironment().put("COMPOSE_HTTP_TIMEOUT", timeout); - Optional dockerCompose = List.of(DOCKER_COMPOSE_BINARIES) + Optional dockerCompose = Arrays.asList(DOCKER_COMPOSE_BINARIES) .stream() .filter(path -> project.file(path).exists()) .findFirst(); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java index 162121db97797..5ff8168a9bed2 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java @@ -94,6 +94,5 @@ public void unpack(File tarFile, File targetDir) throws IOException { entry = tar.getNextTarEntry(); } } - } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java index 01622e6df3ee1..054f01788d126 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java @@ -55,6 +55,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -168,7 +169,11 @@ public static void setupIdeForTestSourceSet(Project project, SourceSet testSourc project.getPluginManager().withPlugin("idea", p -> { IdeaModel idea = 
project.getExtensions().getByType(IdeaModel.class); idea.getModule().setTestSourceDirs(testSourceSet.getJava().getSrcDirs()); - idea.getModule().getScopes().put(testSourceSet.getName(), Map.of("plus", List.of(runtimeClasspathConfiguration))); + idea.getModule().getScopes().put(testSourceSet.getName(), new HashMap>() { + { + put("plus", Arrays.asList(runtimeClasspathConfiguration)); + } + }); }); project.getPluginManager().withPlugin("eclipse", p -> { EclipseModel eclipse = project.getExtensions().getByType(EclipseModel.class); diff --git a/buildSrc/src/main/resources/minimumRuntimeVersion b/buildSrc/src/main/resources/minimumRuntimeVersion index 468437494697b..9d607966b721a 100644 --- a/buildSrc/src/main/resources/minimumRuntimeVersion +++ b/buildSrc/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -1.8 \ No newline at end of file +11 \ No newline at end of file diff --git a/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java b/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java index e1891acc74dd5..e2acf3c087ecc 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/docker/DockerSupportServiceTests.java @@ -33,6 +33,7 @@ import org.opensearch.gradle.test.GradleIntegrationTestCase; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,7 +45,7 @@ public class DockerSupportServiceTests extends GradleIntegrationTestCase { public void testParseOsReleaseOnOracle() { - final List lines = List.of( + final List lines = Arrays.asList( "NAME=\"Oracle Linux Server\"", "VERSION=\"6.10\"", "ID=\"ol\"", @@ -85,11 +86,15 @@ public void testParseOsReleaseOnOracle() { * Trailing whitespace should be removed */ public void testRemoveTrailingWhitespace() { - final List lines = List.of("NAME=\"Oracle Linux Server\" "); + final List lines = Arrays.asList("NAME=\"Oracle Linux 
Server\" "); final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "oracle linux server"); + final Map expected = new HashMap() { + { + put("NAME", "oracle linux server"); + } + }; assertThat(expected, equalTo(results)); } @@ -98,11 +103,15 @@ public void testRemoveTrailingWhitespace() { * Comments should be removed */ public void testRemoveComments() { - final List lines = List.of("# A comment", "NAME=\"Oracle Linux Server\""); + final List lines = Arrays.asList("# A comment", "NAME=\"Oracle Linux Server\""); final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "oracle linux server"); + final Map expected = new HashMap() { + { + put("NAME", "oracle linux server"); + } + }; assertThat(expected, equalTo(results)); } diff --git a/buildSrc/src/testKit/testingConventions/build.gradle b/buildSrc/src/testKit/testingConventions/build.gradle index 309a9d64d4170..418e833e8cb14 100644 --- a/buildSrc/src/testKit/testingConventions/build.gradle +++ b/buildSrc/src/testKit/testingConventions/build.gradle @@ -21,7 +21,7 @@ allprojects { mavenCentral() } dependencies { - testImplementation "junit:junit:4.13.1" + testImplementation "junit:junit:4.13.2" } ext.licenseFile = file("$buildDir/dummy/license") diff --git a/buildSrc/src/testKit/thirdPartyAudit/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/build.gradle index 21b0fc3e74f57..41e699db94dcf 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/build.gradle @@ -40,7 +40,7 @@ repositories { } dependencies { - forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.7' + forbiddenApisCliJar 'de.thetaphi:forbiddenapis:3.2' jdkJarHell 'org.opensearch:opensearch-core:current' compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}" implementation "org.${project.properties.compileGroup}:${project.properties.compileVersion}" diff --git 
a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle index 9d175ed65715d..c8c89fb5e4273 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle @@ -16,7 +16,7 @@ repositories { mavenCentral() } dependencies { - implementation 'org.apache.logging.log4j:log4j-core:2.11.1' + implementation 'org.apache.logging.log4j:log4j-core:2.17.2' } ["0.0.1", "0.0.2"].forEach { v -> diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle index 58d932702ba8b..4aa4d7171e366 100644 --- a/client/benchmark/build.gradle +++ b/client/benchmark/build.gradle @@ -43,7 +43,7 @@ mainClassName = 'org.opensearch.client.benchmark.BenchmarkMain' test.enabled = false dependencies { - api 'org.apache.commons:commons-math3:3.2' + api 'org.apache.commons:commons-math3:3.6.1' api project(":client:rest") // bottleneck should be the client, not OpenSearch diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java index 8f42744aeb5c9..1e94939bed7b5 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/opensearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -67,9 +67,7 @@ public List routes() { new Route(POST, "/_noop_bulk"), new Route(PUT, "/_noop_bulk"), new Route(POST, "/{index}/_noop_bulk"), - new Route(PUT, "/{index}/_noop_bulk"), - new Route(POST, "/{index}/{type}/_noop_bulk"), - new Route(PUT, "/{index}/{type}/_noop_bulk") + new Route(PUT, "/{index}/_noop_bulk") ) ); } @@ -83,7 +81,6 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final 
NodeClient client) throws IOException { BulkRequest bulkRequest = Requests.bulkRequest(); String defaultIndex = request.param("index"); - String defaultType = request.param("type"); String defaultRouting = request.param("routing"); String defaultPipeline = request.param("pipeline"); Boolean defaultRequireAlias = request.paramAsBoolean("require_alias", null); @@ -97,7 +94,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC bulkRequest.add( request.requiredContent(), defaultIndex, - defaultType, defaultRouting, null, defaultPipeline, @@ -117,7 +113,7 @@ private static class BulkRestBuilderListener extends RestBuilderListener routes() { new Route(GET, "/_noop_search"), new Route(POST, "/_noop_search"), new Route(GET, "/{index}/_noop_search"), - new Route(POST, "/{index}/_noop_search"), - new Route(GET, "/{index}/{type}/_noop_search"), - new Route(POST, "/{index}/{type}/_noop_search") + new Route(POST, "/{index}/_noop_search") ) ); } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 4144186ba5f70..07147ce81b72e 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -92,7 +92,7 @@ check.dependsOn(asyncIntegTest) testClusters.all { testDistribution = 'ARCHIVE' systemProperty 'opensearch.scripting.update.ctx_in_params', 'false' - setting 'reindex.remote.whitelist', '[ "[::1]:*", "127.0.0.1:*" ]' + setting 'reindex.remote.allowlist', '[ "[::1]:*", "127.0.0.1:*" ]' extraConfigFile 'roles.yml', file('roles.yml') user username: System.getProperty('tests.rest.cluster.username', 'test_user'), diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java index 00b07fc4881bd..9b4586ec6bf89 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java +++ 
b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java @@ -361,60 +361,6 @@ public Cancellable dataStreamsStatsAsync( ); } - /** - * Creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #create(CreateIndexRequest, RequestOptions)} should be used instead, which accepts a new - * request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.create.CreateIndexResponse create( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #createAsync(CreateIndexRequest, RequestOptions, ActionListener)} should be used instead, - * which accepts a new request object. 
- * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable createAsync( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Updates the mappings on an index using the Put Mapping API. * @@ -456,59 +402,6 @@ public Cancellable putMappingAsync( ); } - /** - * Updates the mappings on an index using the Put Mapping API. - * - * @param putMappingRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #putMapping(PutMappingRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public AcknowledgedResponse putMapping( - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putMappingRequest, - IndicesRequestConverters::putMapping, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously updates the mappings on an index using the Put Mapping API. - * - * @param putMappingRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #putMappingAsync(PutMappingRequest, RequestOptions, ActionListener)} should be used instead, - * which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable putMappingAsync( - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putMappingRequest, - IndicesRequestConverters::putMapping, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Retrieves the mappings on an index or indices using the Get Mapping API. * @@ -550,114 +443,6 @@ public Cancellable getMappingAsync( ); } - /** - * Retrieves the mappings on an index or indices using the Get Mapping API. - * - * @param getMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated - * feature. The method {@link #getMapping(GetMappingsRequest, RequestOptions)} should be used instead, which - * accepts a new request object. 
- */ - @Deprecated - public org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse getMapping( - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getMappingsRequest, - IndicesRequestConverters::getMappings, - options, - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously retrieves the mappings on an index on indices using the Get Mapping API. - * - * @param getMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getMapping(GetMappingsRequest, RequestOptions)} should be used instead, which accepts a new - * request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getMappingAsync( - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getMappingsRequest, - IndicesRequestConverters::getMappings, - options, - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Retrieves the field mappings on an index or indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMapping(GetFieldMappingsRequest, RequestOptions)} should be used instead, which - * accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMapping( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously retrieves the field mappings on an index on indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMappingAsync(GetFieldMappingsRequest, RequestOptions, ActionListener)} should be - * used instead, which accepts a new request object. 
- * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getFieldMappingAsync( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Retrieves the field mappings on an index or indices using the Get Field Mapping API. * @@ -1008,56 +793,6 @@ public Cancellable getAsync(GetIndexRequest getIndexRequest, RequestOptions opti ); } - /** - * Retrieve information about one or more indexes - * - * @param getIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #get(GetIndexRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.get.GetIndexResponse get( - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getIndexRequest, - IndicesRequestConverters::getIndex, - options, - org.opensearch.action.admin.indices.get.GetIndexResponse::fromXContent, - emptySet() - ); - } - - /** - * Retrieve information about one or more indexes - * - * @param getIndexRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #getAsync(GetIndexRequest, RequestOptions, ActionListener)} should be used instead, which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getAsync( - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getIndexRequest, - IndicesRequestConverters::getIndex, - options, - org.opensearch.action.admin.indices.get.GetIndexResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Force merge one or more indices using the Force Merge API. * @@ -1210,53 +945,6 @@ public Cancellable existsAsync(GetIndexRequest request, RequestOptions options, ); } - /** - * Checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #exists(GetIndexRequest, RequestOptions)} should be used instead, which accepts a new request object. 
- */ - @Deprecated - public boolean exists(org.opensearch.action.admin.indices.get.GetIndexRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - Collections.emptySet() - ); - } - - /** - * Asynchronously checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #existsAsync(GetIndexRequest, RequestOptions, ActionListener)} should be used instead, which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable existsAsync( - org.opensearch.action.admin.indices.get.GetIndexRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsync( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - listener, - Collections.emptySet() - ); - } - /** * Shrinks an index using the Shrink Index API. * @@ -1549,59 +1237,6 @@ public Cancellable rolloverAsync(RolloverRequest rolloverRequest, RequestOptions ); } - /** - * Rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses deprecated request and response objects. 
- * The method {@link #rollover(RolloverRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.rollover.RolloverResponse rollover( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses deprecated request and response objects. - * The method {@link #rolloverAsync(RolloverRequest, RequestOptions, ActionListener)} should be used instead, which - * accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable rolloverAsync( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Gets one or more aliases using the Get Index Aliases API. * @@ -1684,57 +1319,6 @@ public Cancellable putSettingsAsync( ); } - /** - * Puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This old form of request allows types in mappings. Use {@link #putTemplate(PutIndexTemplateRequest, RequestOptions)} - * instead which introduces a new request object without types. - */ - @Deprecated - public AcknowledgedResponse putTemplate( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This old form of request allows types in mappings. - * Use {@link #putTemplateAsync(PutIndexTemplateRequest, RequestOptions, ActionListener)} - * instead which introduces a new request object without types. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable putTemplateAsync( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Puts an index template using the Index Templates API. 
* @@ -1906,31 +1490,6 @@ public Cancellable validateQueryAsync( ); } - /** - * Gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, where the - * mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. Use - * {@link #getIndexTemplate(GetIndexTemplatesRequest, RequestOptions)} instead which returns a new response object - */ - @Deprecated - public org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - emptySet() - ); - } - /** * Gets index templates using the Index Templates API * @@ -1994,33 +1553,6 @@ public GetIndexTemplatesResponse getIndexTemplate(GetIndexTemplatesRequest getIn ); } - /** - * Asynchronously gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, - * where the mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. Use - * {@link #getIndexTemplateAsync(GetIndexTemplatesRequest, RequestOptions, ActionListener)} instead which returns a new response object - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getTemplateAsync( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Asynchronously gets index templates using the Index Templates API * diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java index 9979d18635d05..c50ea58982e4e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java @@ -78,8 +78,6 @@ import java.io.IOException; import java.util.Locale; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - final class IndicesRequestConverters { private IndicesRequestConverters() {} @@ -165,20 +163,6 @@ static Request createIndex(CreateIndexRequest createIndexRequest) throws IOExcep return request; } - static Request createIndex(org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest) throws IOException { - String endpoint = RequestConverters.endpoint(createIndexRequest.indices()); - Request request = 
new Request(HttpPut.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withTimeout(createIndexRequest.timeout()); - parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); - parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); @@ -202,31 +186,6 @@ static Request putMapping(PutMappingRequest putMappingRequest) throws IOExceptio return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.mapping.put.PutMappingRequest} that still supports - * types - */ - @Deprecated - static Request putMapping(org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest) throws IOException { - // The concreteIndex is an internal concept, not applicable to requests made over the REST API. 
- if (putMappingRequest.getConcreteIndex() != null) { - throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API"); - } - - Request request = new Request( - HttpPut.METHOD_NAME, - RequestConverters.endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type()) - ); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withTimeout(putMappingRequest.timeout()); - parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request getMappings(GetMappingsRequest getMappingsRequest) { String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); @@ -240,22 +199,6 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) { return request; } - @Deprecated - static Request getMappings(org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingsRequest) { - String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); - String[] types = getMappingsRequest.types() == null ? 
Strings.EMPTY_ARRAY : getMappingsRequest.types(); - - Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types)); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); - parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); - parameters.withLocal(getMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - return request; - } - static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) { String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); @@ -275,30 +218,6 @@ static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) return request; } - @Deprecated - static Request getFieldMapping(org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest) { - String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); - String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); - String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); - - String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices) - .addPathPartAsIs("_mapping") - .addCommaSeparatedPathParts(types) - .addPathPartAsIs("field") - .addCommaSeparatedPathParts(fields) - .build(); - - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); - parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); - parameters.withLocal(getFieldMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - return request; - } - static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); @@ -456,27 +375,6 @@ static Request rollover(RolloverRequest rolloverRequest) throws IOException { return request; } - @Deprecated - static Request rollover(org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getRolloverTarget()) - .addPathPartAsIs("_rollover") - .addPathPart(rolloverRequest.getNewIndexName()) - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(rolloverRequest.timeout()); - params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); - params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); - if (rolloverRequest.isDryRun()) { - params.putParam("dry_run", Boolean.TRUE.toString()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - 
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - request.addParameters(params.asMap()); - return request; - } - static Request getSettings(GetSettingsRequest getSettingsRequest) { String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); @@ -493,28 +391,6 @@ static Request getSettings(GetSettingsRequest getSettingsRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request getIndex(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); - - String endpoint = RequestConverters.endpoint(indices); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withLocal(getIndexRequest.local()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.withHuman(getIndexRequest.humanReadable()); - params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request getIndex(GetIndexRequest getIndexRequest) { String[] indices = getIndexRequest.indices() == null ? 
Strings.EMPTY_ARRAY : getIndexRequest.indices(); @@ -531,28 +407,6 @@ static Request getIndex(GetIndexRequest getIndexRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request indicesExist(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { - throw new IllegalArgumentException("indices are mandatory"); - } - String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); - Request request = new Request(HttpHead.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withLocal(getIndexRequest.local()); - params.withHuman(getIndexRequest.humanReadable()); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request indicesExist(GetIndexRequest getIndexRequest) { if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { throw new IllegalArgumentException("indices are mandatory"); @@ -583,31 +437,6 @@ static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) thr return request; } - /** - * @deprecated This uses the old form of PutIndexTemplateRequest which uses types. 
- * Use (@link {@link #putTemplate(PutIndexTemplateRequest)} instead - */ - @Deprecated - static Request putTemplate(org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest) - throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") - .addPathPart(putIndexTemplateRequest.name()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); - if (putIndexTemplateRequest.create()) { - params.putParam("create", Boolean.TRUE.toString()); - } - if (Strings.hasText(putIndexTemplateRequest.cause())) { - params.putParam("cause", putIndexTemplateRequest.cause()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addPathPart(putIndexTemplateRequest.name()) @@ -669,8 +498,7 @@ static Request simulateIndexTemplate(SimulateIndexTemplateRequest simulateIndexT static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices(); - String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? 
Strings.EMPTY_ARRAY : validateQueryRequest.types(); - String endpoint = RequestConverters.endpoint(indices, types, "_validate/query"); + String endpoint = RequestConverters.endpoint(indices, "_validate/query"); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(validateQueryRequest.indicesOptions()); @@ -694,16 +522,7 @@ static Request getAlias(GetAliasesRequest getAliasesRequest) { return request; } - @Deprecated - static Request getTemplatesWithDocumentTypes(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, true); - } - static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, false); - } - - private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest, boolean includeTypeName) { final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addCommaSeparatedPathParts(getIndexTemplatesRequest.names()) .build(); @@ -711,9 +530,6 @@ private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRe final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); - if (includeTypeName) { - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - } request.addParameters(params.asMap()); return request; } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index f0f33ae1e71fe..3e43963db519f 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -117,7 +117,7 @@ private RequestConverters() { } 
static Request delete(DeleteRequest deleteRequest) { - String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id()); + String endpoint = endpoint(deleteRequest.index(), deleteRequest.id()); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Params parameters = new Params(); @@ -185,11 +185,6 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { if (Strings.hasLength(action.index())) { metadata.field("_index", action.index()); } - if (Strings.hasLength(action.type())) { - if (MapperService.SINGLE_MAPPING_NAME.equals(action.type()) == false) { - metadata.field("_type", action.type()); - } - } if (Strings.hasLength(action.id())) { metadata.field("_id", action.id()); } @@ -284,7 +279,7 @@ static Request get(GetRequest getRequest) { } private static Request getStyleRequest(String method, GetRequest getRequest) { - Request request = new Request(method, endpoint(getRequest.index(), getRequest.type(), getRequest.id())); + Request request = new Request(method, endpoint(getRequest.index(), getRequest.id())); Params parameters = new Params(); parameters.withPreference(getRequest.preference()); @@ -315,13 +310,7 @@ private static Request sourceRequest(GetSourceRequest getSourceRequest, String h parameters.withRealtime(getSourceRequest.realtime()); parameters.withFetchSourceContext(getSourceRequest.fetchSourceContext()); - String optionalType = getSourceRequest.type(); - String endpoint; - if (optionalType == null) { - endpoint = endpoint(getSourceRequest.index(), "_source", getSourceRequest.id()); - } else { - endpoint = endpoint(getSourceRequest.index(), optionalType, getSourceRequest.id(), "_source"); - } + String endpoint = endpoint(getSourceRequest.index(), "_source", getSourceRequest.id()); Request request = new Request(httpMethodName, endpoint); request.addParameters(parameters.asMap()); return request; @@ -344,11 +333,9 @@ static Request index(IndexRequest indexRequest) { String endpoint; if 
(indexRequest.opType() == DocWriteRequest.OpType.CREATE) { - endpoint = indexRequest.type().equals(MapperService.SINGLE_MAPPING_NAME) - ? endpoint(indexRequest.index(), "_create", indexRequest.id()) - : endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id(), "_create"); + endpoint = endpoint(indexRequest.index(), "_create", indexRequest.id()); } else { - endpoint = endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id()); + endpoint = endpoint(indexRequest.index(), indexRequest.id()); } Request request = new Request(method, endpoint); @@ -377,9 +364,7 @@ static Request ping() { } static Request update(UpdateRequest updateRequest) throws IOException { - String endpoint = updateRequest.type().equals(MapperService.SINGLE_MAPPING_NAME) - ? endpoint(updateRequest.index(), "_update", updateRequest.id()) - : endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); + String endpoint = endpoint(updateRequest.index(), "_update", updateRequest.id()); Request request = new Request(HttpPost.METHOD_NAME, endpoint); Params parameters = new Params(); @@ -432,7 +417,7 @@ static Request update(UpdateRequest updateRequest) throws IOException { * for standard searches */ static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException { - Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint)); + Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchEndpoint)); Params params = new Params(); addSearchRequestParams(params, searchRequest); @@ -502,7 +487,7 @@ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throw request = new Request(HttpGet.METHOD_NAME, "_render/template"); } else { SearchRequest searchRequest = searchTemplateRequest.getRequest(); - String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template"); + String endpoint = 
endpoint(searchRequest.indices(), "_search/template"); request = new Request(HttpGet.METHOD_NAME, endpoint); Params params = new Params(); @@ -548,9 +533,7 @@ static Request count(CountRequest countRequest) throws IOException { } static Request explain(ExplainRequest explainRequest) throws IOException { - String endpoint = explainRequest.type().equals(MapperService.SINGLE_MAPPING_NAME) - ? endpoint(explainRequest.index(), "_explain", explainRequest.id()) - : endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"); + String endpoint = endpoint(explainRequest.index(), "_explain", explainRequest.id()); Request request = new Request(HttpGet.METHOD_NAME, endpoint); Params params = new Params(); @@ -633,7 +616,7 @@ private static Request prepareReindexRequest(ReindexRequest reindexRequest, bool private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, boolean waitForCompletion) throws IOException { - String endpoint = endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query"); + String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); Params params = new Params().withRouting(deleteByQueryRequest.getRouting()) .withRefresh(deleteByQueryRequest.isRefresh()) @@ -661,7 +644,7 @@ private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteBy } static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest, boolean waitForCompletion) throws IOException { - String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query"); + String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); Params params = new Params().withRouting(updateByQueryRequest.getRouting()) .withPipeline(updateByQueryRequest.getPipeline()) @@ -799,10 
+782,16 @@ static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } + static String endpoint(String index, String id) { + return new EndpointBuilder().addPathPart(index, MapperService.SINGLE_MAPPING_NAME, id).build(); + } + + @Deprecated static String endpoint(String index, String type, String id) { return new EndpointBuilder().addPathPart(index, type, id).build(); } + @Deprecated static String endpoint(String index, String type, String id, String endpoint) { return new EndpointBuilder().addPathPart(index, type, id).addPathPartAsIs(endpoint).build(); } @@ -815,6 +804,7 @@ static String endpoint(String[] indices, String endpoint) { return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).build(); } + @Deprecated static String endpoint(String[] indices, String[] types, String endpoint) { return new EndpointBuilder().addCommaSeparatedPathParts(indices) .addCommaSeparatedPathParts(types) @@ -829,6 +819,7 @@ static String endpoint(String[] indices, String endpoint, String[] suffixes) { .build(); } + @Deprecated static String endpoint(String[] indices, String endpoint, String type) { return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).addPathPart(type).build(); } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java index 757e0df6aee77..fa13abf72207e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/core/TermVectorsResponse.java @@ -46,24 +46,14 @@ public class TermVectorsResponse { private final String index; - private final String type; private final String id; private final long docVersion; private final boolean 
found; private final long tookInMillis; private final List termVectorList; - public TermVectorsResponse( - String index, - String type, - String id, - long version, - boolean found, - long tookInMillis, - List termVectorList - ) { + public TermVectorsResponse(String index, String id, long version, boolean found, long tookInMillis, List termVectorList) { this.index = index; - this.type = type; this.id = id; this.docVersion = version; this.found = found; @@ -75,19 +65,18 @@ public TermVectorsResponse( "term_vectors", true, args -> { - // as the response comes from server, we are sure that args[6] will be a list of TermVector + // as the response comes from server, we are sure that args[5] will be a list of TermVector @SuppressWarnings("unchecked") - List termVectorList = (List) args[6]; + List termVectorList = (List) args[5]; if (termVectorList != null) { Collections.sort(termVectorList, Comparator.comparing(TermVector::getFieldName)); } return new TermVectorsResponse( (String) args[0], (String) args[1], - (String) args[2], - (long) args[3], - (boolean) args[4], - (long) args[5], + (long) args[2], + (boolean) args[3], + (long) args[4], termVectorList ); } @@ -95,7 +84,6 @@ public TermVectorsResponse( static { PARSER.declareString(constructorArg(), new ParseField("_index")); - PARSER.declareString(constructorArg(), new ParseField("_type")); PARSER.declareString(optionalConstructorArg(), new ParseField("_id")); PARSER.declareLong(constructorArg(), new ParseField("_version")); PARSER.declareBoolean(constructorArg(), new ParseField("found")); @@ -118,16 +106,6 @@ public String getIndex() { return index; } - /** - * Returns the type for the response - * - * @deprecated Types are in the process of being removed. 
- */ - @Deprecated - public String getType() { - return type; - } - /** * Returns the id of the request * can be NULL if there is no document ID @@ -171,7 +149,6 @@ public boolean equals(Object obj) { if (!(obj instanceof TermVectorsResponse)) return false; TermVectorsResponse other = (TermVectorsResponse) obj; return index.equals(other.index) - && type.equals(other.type) && Objects.equals(id, other.id) && docVersion == other.docVersion && found == other.found @@ -181,7 +158,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(index, type, id, docVersion, found, tookInMillis, termVectorList); + return Objects.hash(index, id, docVersion, found, tookInMillis, termVectorList); } public static final class TermVector { diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java index 06ceee78e5566..95188ec0f8e96 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.client; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.ToXContent; @@ -42,6 +43,10 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; /** * Base class for HLRC response parsing tests. 
@@ -99,4 +104,16 @@ protected ToXContent.Params getParams() { return ToXContent.EMPTY_PARAMS; } + protected static void assertMapEquals(ImmutableOpenMap expected, Map actual) { + Set expectedKeys = new HashSet<>(); + Iterator keysIt = expected.keysIt(); + while (keysIt.hasNext()) { + expectedKeys.add(keysIt.next()); + } + + assertEquals(expectedKeys, actual.keySet()); + for (String key : expectedKeys) { + assertEquals(expected.get(key), actual.get(key)); + } + } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java index cae1298a8793d..cc9abdccf4c9f 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java @@ -49,11 +49,8 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.search.SearchHit; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; import java.io.IOException; import java.util.Arrays; @@ -69,9 +66,7 @@ import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.test.hamcrest.OpenSearchAssertions.fieldFromSource; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasIndex; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasProperty; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasType; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.either; @@ -96,17 +91,6 @@ private static BulkProcessor.Builder 
initBulkProcessorBuilder(BulkProcessor.List ); } - private static BulkProcessor.Builder initBulkProcessorBuilderUsingTypes(BulkProcessor.Listener listener) { - return BulkProcessor.builder( - (request, bulkListener) -> highLevelClient().bulkAsync( - request, - expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE), - bulkListener - ), - listener - ); - } - public void testThatBulkProcessorCountIsCorrect() throws Exception { final CountDownLatch latch = new CountDownLatch(1); BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); @@ -210,7 +194,6 @@ public void testBulkProcessorConcurrentRequests() throws Exception { for (BulkItemResponse bulkItemResponse : listener.bulkItems) { assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false)); assertThat(bulkItemResponse.getIndex(), equalTo("test")); - assertThat(bulkItemResponse.getType(), equalTo("_doc")); // with concurrent requests > 1 we can't rely on the order of the bulk requests assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(numDocs))); // we do want to check that we don't get duplicate ids back @@ -317,7 +300,6 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception Set readOnlyIds = new HashSet<>(); for (BulkItemResponse bulkItemResponse : listener.bulkItems) { assertThat(bulkItemResponse.getIndex(), either(equalTo("test")).or(equalTo("test-ro"))); - assertThat(bulkItemResponse.getType(), equalTo("_doc")); if (bulkItemResponse.getIndex().equals("test")) { assertThat(bulkItemResponse.isFailed(), equalTo(false)); // with concurrent requests > 1 we can't rely on the order of the bulk requests @@ -346,7 +328,6 @@ public void testGlobalParametersAndSingleRequest() throws Exception { // tag::bulk-processor-mix-parameters try (BulkProcessor processor = initBulkProcessorBuilder(listener) .setGlobalIndex("tweets") - .setGlobalType("_doc") .setGlobalRouting("routing") 
.setGlobalPipeline("pipeline_id") .build()) { @@ -373,85 +354,9 @@ public void testGlobalParametersAndBulkProcessor() throws Exception { createIndexWithMultipleShards("test"); createFieldAddingPipleine("pipeline_id", "fieldNameXYZ", "valueXYZ"); - final String customType = "testType"; - final String ignoredType = "ignoredType"; int numDocs = randomIntBetween(10, 10); { - final CountDownLatch latch = new CountDownLatch(1); - BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); - // Check that untyped document additions inherit the global type - String globalType = customType; - String localType = null; - try ( - BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener) - // let's make sure that the bulk action limit trips, one single execution will index all the documents - .setConcurrentRequests(randomIntBetween(0, 1)) - .setBulkActions(numDocs) - .setFlushInterval(TimeValue.timeValueHours(24)) - .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) - .setGlobalIndex("test") - .setGlobalType(globalType) - .setGlobalRouting("routing") - .setGlobalPipeline("pipeline_id") - .build() - ) { - - indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id"); - latch.await(); - - assertThat(listener.beforeCounts.get(), equalTo(1)); - assertThat(listener.afterCounts.get(), equalTo(1)); - assertThat(listener.bulkFailures.size(), equalTo(0)); - assertResponseItems(listener.bulkItems, numDocs, globalType); - - Iterable hits = searchAll(new SearchRequest("test").routing("routing")); - - assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(globalType)))); - assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); - } - - } - { - // Check that typed document additions don't inherit the global type - String globalType = ignoredType; - String localType = customType; - final CountDownLatch latch = new 
CountDownLatch(1); - BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); - try ( - BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener) - // let's make sure that the bulk action limit trips, one single execution will index all the documents - .setConcurrentRequests(randomIntBetween(0, 1)) - .setBulkActions(numDocs) - .setFlushInterval(TimeValue.timeValueHours(24)) - .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) - .setGlobalIndex("test") - .setGlobalType(globalType) - .setGlobalRouting("routing") - .setGlobalPipeline("pipeline_id") - .build() - ) { - indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id"); - latch.await(); - - assertThat(listener.beforeCounts.get(), equalTo(1)); - assertThat(listener.afterCounts.get(), equalTo(1)); - assertThat(listener.bulkFailures.size(), equalTo(0)); - assertResponseItems(listener.bulkItems, numDocs, localType); - - Iterable hits = searchAll(new SearchRequest("test").routing("routing")); - - assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(localType)))); - assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); - } - } - { - // Check that untyped document additions and untyped global inherit the established custom type - // (the custom document type introduced to the mapping by the earlier code in this test) - String globalType = null; - String localType = null; final CountDownLatch latch = new CountDownLatch(1); BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); try ( @@ -462,23 +367,22 @@ public void testGlobalParametersAndBulkProcessor() throws Exception { .setFlushInterval(TimeValue.timeValueHours(24)) .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) .setGlobalIndex("test") - .setGlobalType(globalType) .setGlobalRouting("routing") .setGlobalPipeline("pipeline_id") .build() ) { - indexDocs(processor, numDocs, 
null, localType, "test", globalType, "pipeline_id"); + + indexDocs(processor, numDocs, null, "test", "pipeline_id"); latch.await(); assertThat(listener.beforeCounts.get(), equalTo(1)); assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); - assertResponseItems(listener.bulkItems, numDocs, MapperService.SINGLE_MAPPING_NAME); + assertResponseItems(listener.bulkItems, numDocs); Iterable hits = searchAll(new SearchRequest("test").routing("routing")); assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(customType)))); assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); } } @@ -489,45 +393,31 @@ private Matcher[] expectedIds(int numDocs) { return IntStream.rangeClosed(1, numDocs).boxed().map(n -> hasId(n.toString())).>toArray(Matcher[]::new); } - private MultiGetRequest indexDocs( - BulkProcessor processor, - int numDocs, - String localIndex, - String localType, - String globalIndex, - String globalType, - String globalPipeline - ) throws Exception { + private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String localIndex, String globalIndex, String globalPipeline) + throws Exception { MultiGetRequest multiGetRequest = new MultiGetRequest(); for (int i = 1; i <= numDocs; i++) { if (randomBoolean()) { processor.add( - new IndexRequest(localIndex, localType, Integer.toString(i)).source( - XContentType.JSON, - "field", - randomRealisticUnicodeOfLengthBetween(1, 30) - ) + new IndexRequest(localIndex).id(Integer.toString(i)) + .source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30)) ); } else { - BytesArray data = bytesBulkRequest(localIndex, localType, i); - processor.add(data, globalIndex, globalType, globalPipeline, XContentType.JSON); + BytesArray data = bytesBulkRequest(localIndex, i); + processor.add(data, globalIndex, globalPipeline, XContentType.JSON); } 
multiGetRequest.add(localIndex, Integer.toString(i)); } return multiGetRequest; } - private static BytesArray bytesBulkRequest(String localIndex, String localType, int id) throws IOException { + private static BytesArray bytesBulkRequest(String localIndex, int id) throws IOException { XContentBuilder action = jsonBuilder().startObject().startObject("index"); if (localIndex != null) { action.field("_index", localIndex); } - if (localType != null) { - action.field("_type", localType); - } - action.field("_id", Integer.toString(id)); action.endObject().endObject(); @@ -538,19 +428,14 @@ private static BytesArray bytesBulkRequest(String localIndex, String localType, } private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception { - return indexDocs(processor, numDocs, "test", null, null, null, null); + return indexDocs(processor, numDocs, "test", null, null); } private static void assertResponseItems(List bulkItemResponses, int numDocs) { - assertResponseItems(bulkItemResponses, numDocs, MapperService.SINGLE_MAPPING_NAME); - } - - private static void assertResponseItems(List bulkItemResponses, int numDocs, String expectedType) { assertThat(bulkItemResponses.size(), is(numDocs)); int i = 1; for (BulkItemResponse bulkItemResponse : bulkItemResponses) { assertThat(bulkItemResponse.getIndex(), equalTo("test")); - assertThat(bulkItemResponse.getType(), equalTo(expectedType)); assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++))); assertThat( "item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(), @@ -565,7 +450,6 @@ private static void assertMultiGetResponse(MultiGetResponse multiGetResponse, in int i = 1; for (MultiGetItemResponse multiGetItemResponse : multiGetResponse) { assertThat(multiGetItemResponse.getIndex(), equalTo("test")); - assertThat(multiGetItemResponse.getType(), equalTo("_doc")); assertThat(multiGetItemResponse.getId(), equalTo(Integer.toString(i++))); } } diff --git 
a/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java index d42cb7abe2c4c..35fc9d88e316c 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkRequestWithGlobalParametersIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.search.SearchHit; import java.io.IOException; @@ -46,7 +45,6 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasIndex; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasProperty; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasType; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyIterable; @@ -117,7 +115,7 @@ public void testMixPipelineOnRequestAndGlobal() throws IOException { } public void testGlobalIndex() throws IOException { - BulkRequest request = new BulkRequest("global_index", null); + BulkRequest request = new BulkRequest("global_index"); request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1")); request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2")); @@ -129,7 +127,7 @@ public void testGlobalIndex() throws IOException { @SuppressWarnings("unchecked") public void testIndexGlobalAndPerRequest() throws IOException { - BulkRequest request = new BulkRequest("global_index", null); + BulkRequest request = new BulkRequest("global_index"); request.add(new 
IndexRequest("local_index").id("1").source(XContentType.JSON, "field", "bulk1")); request.add( new IndexRequest().id("2") // will take global index @@ -142,31 +140,6 @@ public void testIndexGlobalAndPerRequest() throws IOException { assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasIndex("local_index")), both(hasId("2")).and(hasIndex("global_index")))); } - public void testGlobalType() throws IOException { - BulkRequest request = new BulkRequest(null, "global_type"); - request.add(new IndexRequest("index").id("1").source(XContentType.JSON, "field", "bulk1")); - request.add(new IndexRequest("index").id("2").source(XContentType.JSON, "field", "bulk2")); - - bulkWithTypes(request); - - Iterable hits = searchAll("index"); - assertThat(hits, everyItem(hasType("global_type"))); - } - - public void testTypeGlobalAndPerRequest() throws IOException { - BulkRequest request = new BulkRequest(null, "global_type"); - request.add(new IndexRequest("index1", "local_type", "1").source(XContentType.JSON, "field", "bulk1")); - request.add( - new IndexRequest("index2").id("2") // will take global type - .source(XContentType.JSON, "field", "bulk2") - ); - - bulkWithTypes(request); - - Iterable hits = searchAll("index1", "index2"); - assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasType("local_type")), both(hasId("2")).and(hasType("global_type")))); - } - public void testGlobalRouting() throws IOException { createIndexWithMultipleShards("index"); BulkRequest request = new BulkRequest((String) null); @@ -194,28 +167,6 @@ public void testMixLocalAndGlobalRouting() throws IOException { assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2"))); } - public void testGlobalIndexNoTypes() throws IOException { - BulkRequest request = new BulkRequest("global_index"); - request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1")); - request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2")); - - bulk(request); - - Iterable 
hits = searchAll("global_index"); - assertThat(hits, everyItem(hasIndex("global_index"))); - } - - private BulkResponse bulkWithTypes(BulkRequest request) throws IOException { - BulkResponse bulkResponse = execute( - request, - highLevelClient()::bulk, - highLevelClient()::bulkAsync, - expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE) - ); - assertFalse(bulkResponse.hasFailures()); - return bulkResponse; - } - private BulkResponse bulk(BulkRequest request) throws IOException { BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT); assertFalse(bulkResponse.hasFailures()); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java index d26a71701341e..999c2a0e7643b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java @@ -69,8 +69,6 @@ import org.opensearch.index.VersionType; import org.opensearch.index.get.GetResult; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.document.RestBulkAction; -import org.opensearch.rest.action.document.RestMultiGetAction; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; import org.opensearch.search.fetch.subphase.FetchSourceContext; @@ -110,7 +108,6 @@ public void testDelete() throws IOException { } DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); - assertEquals("_doc", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } @@ -120,7 +117,6 @@ public void testDelete() throws IOException { DeleteRequest deleteRequest = new DeleteRequest("index", docId); DeleteResponse deleteResponse = 
execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); - assertEquals("_doc", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); } @@ -159,7 +155,6 @@ public void testDelete() throws IOException { DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(13); DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); - assertEquals("_doc", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } @@ -196,7 +191,6 @@ public void testDelete() throws IOException { DeleteRequest deleteRequest = new DeleteRequest("index", docId).routing("foo"); DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); - assertEquals("_doc", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } @@ -337,7 +331,6 @@ public void testGet() throws IOException { } GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertFalse(getResponse.isSourceEmpty()); @@ -348,7 +341,6 @@ public void testGet() throws IOException { GetRequest getRequest = new GetRequest("index", "does_not_exist"); GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", 
getResponse.getType()); assertEquals("does_not_exist", getResponse.getId()); assertFalse(getResponse.isExists()); assertEquals(-1, getResponse.getVersion()); @@ -360,7 +352,6 @@ public void testGet() throws IOException { getRequest.fetchSourceContext(new FetchSourceContext(false, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)); GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertTrue(getResponse.isSourceEmpty()); @@ -376,7 +367,6 @@ public void testGet() throws IOException { } GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertFalse(getResponse.isSourceEmpty()); @@ -398,7 +388,6 @@ public void testMultiGet() throws IOException { assertTrue(response.getResponses()[0].isFailed()); assertNull(response.getResponses()[0].getResponse()); assertEquals("id1", response.getResponses()[0].getFailure().getId()); - assertNull(response.getResponses()[0].getFailure().getType()); assertEquals("index", response.getResponses()[0].getFailure().getIndex()); assertEquals( "OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", @@ -408,7 +397,6 @@ public void testMultiGet() throws IOException { assertTrue(response.getResponses()[1].isFailed()); assertNull(response.getResponses()[1].getResponse()); assertEquals("id2", response.getResponses()[1].getId()); - assertNull(response.getResponses()[1].getType()); assertEquals("index", response.getResponses()[1].getIndex()); assertEquals( "OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", @@ -434,47 +422,26 @@ public void testMultiGet() throws 
IOException { assertFalse(response.getResponses()[0].isFailed()); assertNull(response.getResponses()[0].getFailure()); assertEquals("id1", response.getResponses()[0].getId()); - assertEquals("_doc", response.getResponses()[0].getType()); assertEquals("index", response.getResponses()[0].getIndex()); assertEquals(Collections.singletonMap("field", "value1"), response.getResponses()[0].getResponse().getSource()); assertFalse(response.getResponses()[1].isFailed()); assertNull(response.getResponses()[1].getFailure()); assertEquals("id2", response.getResponses()[1].getId()); - assertEquals("_doc", response.getResponses()[1].getType()); assertEquals("index", response.getResponses()[1].getIndex()); assertEquals(Collections.singletonMap("field", "value2"), response.getResponses()[1].getResponse().getSource()); } } - public void testMultiGetWithTypes() throws IOException { + public void testMultiGetWithIds() throws IOException { BulkRequest bulk = new BulkRequest(); bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - bulk.add(new IndexRequest("index", "type", "id1").source("{\"field\":\"value1\"}", XContentType.JSON)); - bulk.add(new IndexRequest("index", "type", "id2").source("{\"field\":\"value2\"}", XContentType.JSON)); + bulk.add(new IndexRequest("index").id("id1").source("{\"field\":\"value1\"}", XContentType.JSON)); + bulk.add(new IndexRequest("index").id("id2").source("{\"field\":\"value2\"}", XContentType.JSON)); - highLevelClient().bulk(bulk, expectWarningsOnce(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "id1"); - multiGetRequest.add("index", "type", "id2"); - - MultiGetResponse response = execute( - multiGetRequest, - highLevelClient()::mget, - highLevelClient()::mgetAsync, - expectWarningsOnce(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE) - ); - assertEquals(2, response.getResponses().length); - - GetResponse firstResponse = response.getResponses()[0].getResponse(); - 
assertEquals("index", firstResponse.getIndex()); - assertEquals("type", firstResponse.getType()); - assertEquals("id1", firstResponse.getId()); - - GetResponse secondResponse = response.getResponses()[1].getResponse(); - assertEquals("index", secondResponse.getIndex()); - assertEquals("type", secondResponse.getType()); - assertEquals("id2", secondResponse.getId()); + multiGetRequest.add("index", "id2"); } public void testGetSource() throws IOException { @@ -509,7 +476,7 @@ public void testGetSource() throws IOException { ); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertEquals( - "OpenSearch exception [type=resource_not_found_exception, " + "reason=Document not found [index]/[_doc]/[does_not_exist]]", + "OpenSearch exception [type=resource_not_found_exception, " + "reason=Document not found [index]/[does_not_exist]]", exception.getMessage() ); } @@ -563,7 +530,6 @@ public void testIndex() throws IOException { assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertTrue(Strings.hasLength(indexResponse.getId())); assertEquals(1L, indexResponse.getVersion()); assertNotNull(indexResponse.getShardId()); @@ -583,7 +549,6 @@ public void testIndex() throws IOException { IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals("id", indexResponse.getId()); assertEquals(1L, indexResponse.getVersion()); @@ -593,7 +558,6 @@ public void testIndex() throws IOException { indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.OK, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); - 
assertEquals("_doc", indexResponse.getType()); assertEquals("id", indexResponse.getId()); assertEquals(2L, indexResponse.getVersion()); @@ -651,7 +615,6 @@ public void testIndex() throws IOException { IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals("external_version_type", indexResponse.getId()); assertEquals(12L, indexResponse.getVersion()); } @@ -663,7 +626,6 @@ public void testIndex() throws IOException { IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals("with_create_op_type", indexResponse.getId()); OpenSearchStatusException exception = expectThrows( @@ -691,7 +653,7 @@ public void testUpdate() throws IOException { ); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertEquals( - "OpenSearch exception [type=document_missing_exception, reason=[_doc][does_not_exist]: document missing]", + "OpenSearch exception [type=document_missing_exception, reason=[does_not_exist]: document missing]", exception.getMessage() ); } @@ -816,7 +778,6 @@ public void testUpdate() throws IOException { UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", updateResponse.getIndex()); - assertEquals("_doc", updateResponse.getType()); assertEquals("with_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); assertEquals(1L, updateResponse.getVersion()); @@ -831,7 +792,6 @@ public void testUpdate() throws IOException { UpdateResponse updateResponse = 
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", updateResponse.getIndex()); - assertEquals("_doc", updateResponse.getType()); assertEquals("with_doc_as_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); assertEquals(1L, updateResponse.getVersion()); @@ -847,7 +807,6 @@ public void testUpdate() throws IOException { UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", updateResponse.getIndex()); - assertEquals("_doc", updateResponse.getType()); assertEquals("with_scripted_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); @@ -1043,7 +1002,6 @@ private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse b assertEquals(i, bulkItemResponse.getItemId()); assertEquals("index", bulkItemResponse.getIndex()); - assertEquals("_doc", bulkItemResponse.getType()); assertEquals(String.valueOf(i), bulkItemResponse.getId()); DocWriteRequest.OpType requestOpType = bulkRequest.requests().get(i).opType(); @@ -1069,7 +1027,6 @@ public void testUrlEncode() throws IOException { indexRequest.source("field", "value"); IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT); assertEquals(expectedIndex, indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals("id#1", indexResponse.getId()); } { @@ -1077,7 +1034,6 @@ public void testUrlEncode() throws IOException { GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT); assertTrue(getResponse.isExists()); assertEquals(expectedIndex, getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals("id#1", getResponse.getId()); } @@ -1087,7 +1043,6 @@ public void testUrlEncode() throws 
IOException { indexRequest.source("field", "value"); IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals(docId, indexResponse.getId()); } { @@ -1095,7 +1050,6 @@ public void testUrlEncode() throws IOException { GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT); assertTrue(getResponse.isExists()); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals(docId, getResponse.getId()); } @@ -1111,7 +1065,6 @@ public void testParamsEncode() throws IOException { indexRequest.routing(routing); IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT); assertEquals("index", indexResponse.getIndex()); - assertEquals("_doc", indexResponse.getType()); assertEquals("id", indexResponse.getId()); } { @@ -1119,7 +1072,6 @@ public void testParamsEncode() throws IOException { GetResponse getResponse = highLevelClient().get(getRequest, RequestOptions.DEFAULT); assertTrue(getResponse.isExists()); assertEquals("index", getResponse.getIndex()); - assertEquals("_doc", getResponse.getType()); assertEquals("id", getResponse.getId()); assertEquals(routing, getResponse.getField("_routing").getValue()); } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java index 043a75d28a301..f9c8851f8839e 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java @@ -122,18 +122,9 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.MapperService; import 
org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.admin.indices.RestCreateIndexAction; -import org.opensearch.rest.action.admin.indices.RestGetFieldMappingAction; -import org.opensearch.rest.action.admin.indices.RestGetIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestGetIndicesAction; -import org.opensearch.rest.action.admin.indices.RestGetMappingAction; -import org.opensearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestPutMappingAction; -import org.opensearch.rest.action.admin.indices.RestRolloverIndexAction; import java.io.IOException; import java.util.Arrays; @@ -201,18 +192,6 @@ public void testIndicesExists() throws IOException { } } - public void testIndicesExistsWithTypes() throws IOException { - // Index present - String indexName = "test_index_exists_index_present"; - createIndex(indexName, Settings.EMPTY); - - org.opensearch.action.admin.indices.get.GetIndexRequest request = new org.opensearch.action.admin.indices.get.GetIndexRequest(); - request.indices(indexName); - - boolean response = execute(request, highLevelClient().indices()::exists, highLevelClient().indices()::existsAsync); - assertTrue(response); - } - @SuppressWarnings({ "unchecked", "rawtypes" }) public void testCreateIndex() throws IOException { { @@ -277,74 +256,6 @@ public void testCreateIndex() throws IOException { } } - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testCreateIndexWithTypes() throws IOException { - { - // Create index - String indexName = "plain_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - 
createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - assertTrue(indexExists(indexName)); - } - { - // Create index with mappings, aliases and settings - String indexName = "rich_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - Alias alias = new Alias("alias_name"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.routing("1"); - createIndexRequest.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_REPLICAS, 2); - createIndexRequest.settings(settings); - - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder); - - org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("2", XContentMapValues.extractValue(indexName + ".settings.index.number_of_replicas", getIndexResponse)); - - Map aliasData = (Map) XContentMapValues.extractValue( - indexName + ".aliases.alias_name", - getIndexResponse - ); - assertNotNull(aliasData); - assertEquals("1", aliasData.get("index_routing")); - Map filter = (Map) aliasData.get("filter"); - Map term = (Map) filter.get("term"); - assertEquals(2016, 
term.get("year")); - - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - } - } - public void testGetSettings() throws IOException { String indexName = "get_settings_index"; Settings basicSettings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build(); @@ -490,33 +401,6 @@ public void testGetIndex() throws IOException { assertEquals("integer", fieldMapping.get("type")); } - @SuppressWarnings("unchecked") - public void testGetIndexWithTypes() throws IOException { - String indexName = "get_index_test"; - Settings basicSettings = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build(); - String mappings = "\"properties\":{\"field-1\":{\"type\":\"integer\"}}"; - createIndex(indexName, basicSettings, mappings); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indexName).includeDefaults(false); - org.opensearch.action.admin.indices.get.GetIndexResponse getIndexResponse = execute( - getIndexRequest, - highLevelClient().indices()::get, - highLevelClient().indices()::getAsync, - expectWarningsOnce(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE) - ); - - // default settings should be null - assertNull(getIndexResponse.getSetting(indexName, "index.refresh_interval")); - assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); - assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); - assertNotNull(getIndexResponse.getMappings().get(indexName)); - MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName).get("_doc"); - assertNotNull(mappingMetadata); - assertEquals("_doc", mappingMetadata.type()); - assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetadata.source().string()); - } - @SuppressWarnings("unchecked") public void 
testGetIndexWithDefaults() throws IOException { String indexName = "get_index_test"; @@ -581,32 +465,6 @@ public void testPutMapping() throws IOException { assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); } - public void testPutMappingWithTypes() throws IOException { - String indexName = "mapping_index"; - createIndex(indexName, Settings.EMPTY); - - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = - new org.opensearch.action.admin.indices.mapping.put.PutMappingRequest(indexName); - putMappingRequest.type("some_type"); - - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = execute( - putMappingRequest, - highLevelClient().indices()::putMapping, - highLevelClient().indices()::putMappingAsync, - expectWarningsOnce(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(putMappingResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - } - public void testGetMapping() throws IOException { String indexName = "test"; createIndex(indexName, Settings.EMPTY); @@ -646,47 +504,6 @@ public void testGetMapping() throws IOException { assertThat(mappings, equalTo(expected)); } - public void testGetMappingWithTypes() throws IOException { - String indexName = "test"; - createIndex(indexName, Settings.EMPTY); - - PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", 
"text"); - mappingBuilder.endObject().endObject().endObject(); - putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = execute( - putMappingRequest, - highLevelClient().indices()::putMapping, - highLevelClient().indices()::putMappingAsync - ); - assertTrue(putMappingResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest request = - new org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest().indices(indexName); - - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse getMappingsResponse = execute( - request, - highLevelClient().indices()::getMapping, - highLevelClient().indices()::getMappingAsync, - expectWarningsOnce(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE) - ); - - Map mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); - Map type = new HashMap<>(); - type.put("type", "text"); - Map field = new HashMap<>(); - field.put("field", type); - Map expected = new HashMap<>(); - expected.put("properties", field); - assertThat(mappings, equalTo(expected)); - } - public void testGetFieldMapping() throws IOException { String indexName = "test"; createIndex(indexName, Settings.EMPTY); @@ -723,45 +540,6 @@ public void testGetFieldMapping() throws IOException { assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata))); } - public void testGetFieldMappingWithTypes() throws IOException { - String indexName = "test"; - createIndex(indexName, Settings.EMPTY); - - PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - 
mappingBuilder.endObject().endObject().endObject(); - putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = execute( - putMappingRequest, - highLevelClient().indices()::putMapping, - highLevelClient().indices()::putMappingAsync - ); - assertTrue(putMappingResponse.isAcknowledged()); - - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest = - new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest().indices(indexName).types("_doc").fields("field"); - - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMappingsResponse = execute( - getFieldMappingsRequest, - highLevelClient().indices()::getFieldMapping, - highLevelClient().indices()::getFieldMappingAsync, - expectWarningsOnce(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE) - ); - - final Map fieldMappingMap = - getFieldMappingsResponse.mappings().get(indexName).get("_doc"); - - final org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata metadata = - new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata( - "field", - new BytesArray("{\"field\":{\"type\":\"text\"}}") - ); - assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata))); - } - public void testDeleteIndex() throws IOException { { // Delete index if exists @@ -1316,33 +1094,6 @@ public void testRollover() throws IOException { } } - public void testRolloverWithTypes() throws IOException { - highLevelClient().indices().create(new CreateIndexRequest("test").alias(new Alias("alias")), RequestOptions.DEFAULT); - highLevelClient().index(new IndexRequest("test").id("1").source("field", "value"), RequestOptions.DEFAULT); - highLevelClient().index( - new IndexRequest("test").id("2").source("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); - - 
org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest("alias", "test_new"); - rolloverRequest.addMaxIndexDocsCondition(1); - rolloverRequest.getCreateIndexRequest().mapping("_doc", "field2", "type=keyword"); - - org.opensearch.action.admin.indices.rollover.RolloverResponse rolloverResponse = execute( - rolloverRequest, - highLevelClient().indices()::rollover, - highLevelClient().indices()::rolloverAsync, - expectWarningsOnce(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(rolloverResponse.isRolledOver()); - assertFalse(rolloverResponse.isDryRun()); - Map conditionStatus = rolloverResponse.getConditionStatus(); - assertTrue(conditionStatus.get("[max_docs: 1]")); - assertEquals("test", rolloverResponse.getOldIndex()); - assertEquals("test_new", rolloverResponse.getNewIndex()); - } - public void testGetAlias() throws IOException { { createIndex("index1", Settings.EMPTY); @@ -1731,38 +1482,6 @@ public void testIndexPutSettingNonExistent() throws IOException { ); } - @SuppressWarnings("unchecked") - public void testPutTemplateWithTypes() throws Exception { - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) - .order(10) - .create(randomBoolean()) - .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) - .mapping("doc", "host_name", "type=keyword", "description", "type=text") - .alias(new Alias("alias-1").indexRouting("abc")) - .alias(new Alias("{index}-write").searchRouting("xyz")); - - AcknowledgedResponse putTemplateResponse = execute( - putTemplateRequest, - highLevelClient().indices()::putTemplate, - highLevelClient().indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - 
assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); - - Map templates = getAsMap("/_template/my-template"); - assertThat(templates.keySet(), hasSize(1)); - assertThat(extractValue("my-template.order", templates), equalTo(10)); - assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*")); - assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3")); - assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0")); - assertThat(extractValue("my-template.mappings.properties.host_name.type", templates), equalTo("keyword")); - assertThat(extractValue("my-template.mappings.properties.description.type", templates), equalTo("text")); - assertThat((Map) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc")); - assertThat((Map) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz")); - } - @SuppressWarnings("unchecked") public void testPutTemplate() throws Exception { PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template").patterns( @@ -1855,48 +1574,6 @@ public void testPutTemplateWithTypesUsingUntypedAPI() throws Exception { ); } - @SuppressWarnings("unchecked") - public void testPutTemplateWithNoTypesUsingTypedApi() throws Exception { - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) - .order(10) - .create(randomBoolean()) - .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) - .mapping( - "my_doc_type", - // Note that the declared type is missing from the mapping - "{ " - + "\"properties\":{" - + "\"host_name\": {\"type\":\"keyword\"}," - + "\"description\": {\"type\":\"text\"}" - + "}" - + "}", - 
XContentType.JSON - ) - .alias(new Alias("alias-1").indexRouting("abc")) - .alias(new Alias("{index}-write").searchRouting("xyz")); - - AcknowledgedResponse putTemplateResponse = execute( - putTemplateRequest, - highLevelClient().indices()::putTemplate, - highLevelClient().indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); - - Map templates = getAsMap("/_template/my-template"); - assertThat(templates.keySet(), hasSize(1)); - assertThat(extractValue("my-template.order", templates), equalTo(10)); - assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*")); - assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3")); - assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0")); - assertThat(extractValue("my-template.mappings.properties.host_name.type", templates), equalTo("keyword")); - assertThat(extractValue("my-template.mappings.properties.description.type", templates), equalTo("text")); - assertThat((Map) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc")); - assertThat((Map) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz")); - } - public void testPutTemplateBadRequests() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1978,157 +1655,6 @@ public void testInvalidValidateQuery() throws IOException { assertFalse(response.isValid()); } - // Tests the deprecated form of the API that returns templates with doc types (using the server-side's GetIndexTemplateResponse) - public void testCRUDIndexTemplateWithTypes() throws Exception { - RestHighLevelClient client = highLevelClient(); - - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate1 = - new 
org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-1") - .patterns(Arrays.asList("pattern-1", "name-1")) - .alias(new Alias("alias-1")); - assertThat( - execute( - putTemplate1, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate2 = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-2") - .patterns(Arrays.asList("pattern-2", "name-2")) - .mapping("custom_doc_type", "name", "type=text") - .settings(Settings.builder().put("number_of_shards", "2").put("number_of_replicas", "0")); - assertThat( - execute( - putTemplate2, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate1 = execute( - new GetIndexTemplatesRequest("template-1"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate1.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0); - assertThat(template1.name(), equalTo("template-1")); - assertThat(template1.patterns(), contains("pattern-1", "name-1")); - assertTrue(template1.aliases().containsKey("alias-1")); - - // Check the typed version of the call - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate2 = execute( - new GetIndexTemplatesRequest("template-2"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - 
expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate2.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0); - assertThat(template2.name(), equalTo("template-2")); - assertThat(template2.patterns(), contains("pattern-2", "name-2")); - assertTrue(template2.aliases().isEmpty()); - assertThat(template2.settings().get("index.number_of_shards"), equalTo("2")); - assertThat(template2.settings().get("index.number_of_replicas"), equalTo("0")); - // Ugly deprecated form of API requires use of doc type to get at mapping object which is CompressedXContent - assertTrue(template2.mappings().containsKey("custom_doc_type")); - - List names = randomBoolean() ? Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); - GetIndexTemplatesRequest getBothRequest = new GetIndexTemplatesRequest(names); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getBoth = execute( - getBothRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getBoth.getIndexTemplates(), hasSize(2)); - assertThat( - getBoth.getIndexTemplates().stream().map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName).toArray(), - arrayContainingInAnyOrder("template-1", "template-2") - ); - - GetIndexTemplatesRequest getAllRequest = new GetIndexTemplatesRequest(); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getAll = execute( - getAllRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2)); - assertThat( - getAll.getIndexTemplates() - .stream() - .map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName) - 
.collect(Collectors.toList()), - hasItems("template-1", "template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-1"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute(new GetIndexTemplatesRequest("template-1"), client.indices()::getTemplate, client.indices()::getTemplateAsync) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new DeleteIndexTemplateRequest("template-1"), - client.indices()::deleteTemplate, - client.indices()::deleteTemplateAsync - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates(), - hasSize(1) - ); - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates().get(0).name(), - equalTo("template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-*"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - } - public void testCRUDIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index 0ea2280b386eb..7276cbb44b030 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -96,7 +96,6 @@ import static org.opensearch.index.RandomCreateIndexGenerator.randomAlias; import static org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings; import static org.opensearch.index.alias.RandomAliasActionsGenerator.randomAliasAction; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -140,40 +139,6 @@ public void testIndicesExistEmptyIndices() { ); } - public void testIndicesExistEmptyIndicesWithTypes() { - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist(new org.opensearch.action.admin.indices.get.GetIndexRequest()) - ); - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist( - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices((String[]) null) - ) - ); - } - - public void testIndicesExistWithTypes() { - String[] indices = RequestConvertersTests.randomIndicesNames(1, 10); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indices); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - 
RequestConvertersTests.setRandomIncludeDefaults(getIndexRequest::includeDefaults, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - final Request request = IndicesRequestConverters.indicesExist(getIndexRequest); - - Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod()); - Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint()); - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertNull(request.getEntity()); - } - public void testCreateIndex() throws IOException { CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); @@ -189,23 +154,6 @@ public void testCreateIndex() throws IOException { RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); } - public void testCreateIndexWithTypes() throws IOException { - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = org.opensearch.index.RandomCreateIndexGenerator - .randomCreateIndexRequest(); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams); - RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.createIndex(createIndexRequest); - Assert.assertEquals("/" + createIndexRequest.index(), request.getEndpoint()); - Assert.assertEquals(expectedParams, request.getParameters()); - Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); - } - public void testCreateIndexNullIndex() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new 
CreateIndexRequest(null)); assertEquals(e.getMessage(), "The index name cannot be null."); @@ -254,37 +202,6 @@ public void testPutMapping() throws IOException { RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); } - public void testPutMappingWithTypes() throws IOException { - org.opensearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = - new org.opensearch.action.admin.indices.mapping.put.PutMappingRequest(); - - String[] indices = RequestConvertersTests.randomIndicesNames(0, 5); - putMappingRequest.indices(indices); - - String type = OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10); - putMappingRequest.type(type); - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.putMapping(putMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - endpoint.add(type); - Assert.assertEquals(endpoint.toString(), request.getEndpoint()); - - Assert.assertEquals(expectedParams, request.getParameters()); - Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); - } - public void testGetMapping() { GetMappingsRequest getMappingRequest = new GetMappingsRequest(); @@ -318,53 +235,6 @@ public void testGetMapping() { Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } - public void testGetMappingWithTypes() { - org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest getMappingRequest = - new 
org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = RequestConvertersTests.randomIndicesNames(0, 5); - getMappingRequest.indices(indices); - } else if (randomBoolean()) { - getMappingRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getMappingRequest.types(type); - } else if (randomBoolean()) { - getMappingRequest.types((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomIndicesOptions( - getMappingRequest::indicesOptions, - getMappingRequest::indicesOptions, - expectedParams - ); - RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams); - RequestConvertersTests.setRandomLocal(getMappingRequest::local, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.getMappings(getMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - public void testGetFieldMapping() { GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); @@ -410,67 +280,6 @@ public void testGetFieldMapping() { Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } - public void testGetFieldMappingWithTypes() { - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest = - new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest(); - - String[] indices = 
Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = RequestConvertersTests.randomIndicesNames(0, 5); - getFieldMappingsRequest.indices(indices); - } else if (randomBoolean()) { - getFieldMappingsRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getFieldMappingsRequest.types(type); - } else if (randomBoolean()) { - getFieldMappingsRequest.types((String[]) null); - } - - String[] fields = null; - if (randomBoolean()) { - fields = new String[randomIntBetween(1, 5)]; - for (int i = 0; i < fields.length; i++) { - fields[i] = randomAlphaOfLengthBetween(3, 10); - } - getFieldMappingsRequest.fields(fields); - } else if (randomBoolean()) { - getFieldMappingsRequest.fields((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomIndicesOptions( - getFieldMappingsRequest::indicesOptions, - getFieldMappingsRequest::indicesOptions, - expectedParams - ); - RequestConvertersTests.setRandomLocal(getFieldMappingsRequest::local, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.getFieldMapping(getFieldMappingsRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - endpoint.add("field"); - if (fields != null) { - endpoint.add(String.join(",", fields)); - } - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - public void testPutDataStream() { String name = randomAlphaOfLength(10); CreateDataStreamRequest createDataStreamRequest = new CreateDataStreamRequest(name); @@ -603,41 +412,6 @@ public void testGetIndex() throws 
IOException { Assert.assertThat(request.getEntity(), nullValue()); } - public void testGetIndexWithTypes() throws IOException { - String[] indicesUnderTest = OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indicesUnderTest); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - if (OpenSearchTestCase.randomBoolean()) { - // the request object will not have include_defaults present unless it is set to - // true - getIndexRequest.includeDefaults(OpenSearchTestCase.randomBoolean()); - if (getIndexRequest.includeDefaults()) { - expectedParams.put("include_defaults", Boolean.toString(true)); - } - } - - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indicesUnderTest != null && indicesUnderTest.length > 0) { - endpoint.add(String.join(",", indicesUnderTest)); - } - - Request request = IndicesRequestConverters.getIndex(getIndexRequest); - - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - Assert.assertThat(request.getEntity(), nullValue()); - } - public void testDeleteIndexEmptyIndices() { String[] indices = OpenSearchTestCase.randomBoolean() ? 
null : Strings.EMPTY_ARRAY; ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); @@ -984,51 +758,6 @@ public void testRollover() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testRolloverWithTypes() throws IOException { - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest( - OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10), - OpenSearchTestCase.randomBoolean() ? null : OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10) - ); - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams); - RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.dryRun(OpenSearchTestCase.randomBoolean()); - if (rolloverRequest.isDryRun()) { - expectedParams.put("dry_run", "true"); - } - } - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.addMaxIndexAgeCondition(new TimeValue(OpenSearchTestCase.randomNonNegativeLong())); - } - if (OpenSearchTestCase.randomBoolean()) { - String type = OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, org.opensearch.index.RandomCreateIndexGenerator.randomMapping(type)); - } - if (OpenSearchTestCase.randomBoolean()) { - org.opensearch.index.RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); - } - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.getCreateIndexRequest().settings(org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings()); - } - RequestConvertersTests.setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams); - - Request request = 
IndicesRequestConverters.rollover(rolloverRequest); - if (rolloverRequest.getNewIndexName() == null) { - Assert.assertEquals("/" + rolloverRequest.getRolloverTarget() + "/_rollover", request.getEndpoint()); - } else { - Assert.assertEquals( - "/" + rolloverRequest.getRolloverTarget() + "/_rollover/" + rolloverRequest.getNewIndexName(), - request.getEndpoint() - ); - } - Assert.assertEquals(HttpPost.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(rolloverRequest, request.getEntity()); - Assert.assertEquals(expectedParams, request.getParameters()); - } - public void testGetAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); @@ -1093,57 +822,6 @@ public void testIndexPutSettings() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testPutTemplateRequestWithTypes() throws Exception { - Map names = new HashMap<>(); - names.put("log", "log"); - names.put("template#1", "template%231"); - names.put("-#template", "-%23template"); - names.put("foo^bar", "foo%5Ebar"); - - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name( - OpenSearchTestCase.randomFrom(names.keySet()) - ).patterns(Arrays.asList(OpenSearchTestCase.generateRandomStringArray(20, 100, false, false))); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.order(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.version(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.settings( - Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()) - ); - } - Map expectedParams = new HashMap<>(); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.mapping( - "doc-" + OpenSearchTestCase.randomInt(), - "field-" + 
OpenSearchTestCase.randomInt(), - "type=" + OpenSearchTestCase.randomFrom("text", "keyword") - ); - } - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.alias(new Alias("alias-" + OpenSearchTestCase.randomInt())); - } - if (OpenSearchTestCase.randomBoolean()) { - expectedParams.put("create", Boolean.TRUE.toString()); - putTemplateRequest.create(true); - } - if (OpenSearchTestCase.randomBoolean()) { - String cause = OpenSearchTestCase.randomUnicodeOfCodepointLengthBetween(1, 50); - putTemplateRequest.cause(cause); - expectedParams.put("cause", cause); - } - RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams); - - Request request = IndicesRequestConverters.putTemplate(putTemplateRequest); - Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - RequestConvertersTests.assertToXContentBody(putTemplateRequest, request.getEntity()); - } - public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); @@ -1198,7 +876,6 @@ public void testPutTemplateRequest() throws Exception { public void testValidateQuery() throws Exception { String[] indices = OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5); - String[] types = OpenSearchTestCase.randomBoolean() ? 
OpenSearchTestCase.generateRandomStringArray(5, 5, false, false) : null; ValidateQueryRequest validateQueryRequest; if (OpenSearchTestCase.randomBoolean()) { validateQueryRequest = new ValidateQueryRequest(indices); @@ -1206,7 +883,6 @@ public void testValidateQuery() throws Exception { validateQueryRequest = new ValidateQueryRequest(); validateQueryRequest.indices(indices); } - validateQueryRequest.types(types); Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomIndicesOptions( validateQueryRequest::indicesOptions, @@ -1223,9 +899,6 @@ public void testValidateQuery() throws Exception { StringJoiner endpoint = new StringJoiner("/", "/", ""); if (indices != null && indices.length > 0) { endpoint.add(String.join(",", indices)); - if (types != null && types.length > 0) { - endpoint.add(String.join(",", types)); - } } endpoint.add("_validate/query"); Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString())); @@ -1247,8 +920,7 @@ public void testGetTemplateRequest() throws Exception { RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest::setMasterNodeTimeout, expectedParams); RequestConvertersTests.setRandomLocal(getTemplatesRequest::setLocal, expectedParams); - Request request = IndicesRequestConverters.getTemplatesWithDocumentTypes(getTemplatesRequest); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); + Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest); Assert.assertThat( request.getEndpoint(), equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(","))) diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java index 51b0ce00a14cd..32c6cde0725b4 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java +++ 
b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java @@ -172,10 +172,6 @@ public void testGet() { getAndExistsTest(RequestConverters::get, HttpGet.METHOD_NAME); } - public void testGetWithType() { - getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME); - } - public void testSourceExists() throws IOException { doTestSourceExists((index, id) -> new GetSourceRequest(index, id)); } @@ -221,13 +217,7 @@ private static void doTestSourceExists(BiFunction requestConverter, String method) { String index = randomAlphaOfLengthBetween(3, 10); String id = randomAlphaOfLengthBetween(3, 10); @@ -435,18 +409,6 @@ private static void getAndExistsTest(Function requestConver assertEquals(method, request.getMethod()); } - private static void getAndExistsWithTypeTest(Function requestConverter, String method) { - String index = randomAlphaOfLengthBetween(3, 10); - String type = randomAlphaOfLengthBetween(3, 10); - String id = randomAlphaOfLengthBetween(3, 10); - GetRequest getRequest = new GetRequest(index, type, id); - - Request request = requestConverter.apply(getRequest); - assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint()); - assertNull(request.getEntity()); - assertEquals(method, request.getMethod()); - } - public void testReindex() throws IOException { ReindexRequest reindexRequest = new ReindexRequest(); reindexRequest.setSourceIndices("source_idx"); @@ -468,15 +430,9 @@ public void testReindex() throws IOException { ); reindexRequest.setRemoteInfo(remoteInfo); } - if (randomBoolean()) { - reindexRequest.setSourceDocTypes("doc", "tweet"); - } if (randomBoolean()) { reindexRequest.setSourceBatchSize(randomInt(100)); } - if (randomBoolean()) { - reindexRequest.setDestDocType("tweet_and_doc"); - } if (randomBoolean()) { reindexRequest.setDestOpType("create"); } @@ -536,9 +492,6 @@ public void testUpdateByQuery() throws IOException { UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(); 
updateByQueryRequest.indices(randomIndicesNames(1, 5)); Map expectedParams = new HashMap<>(); - if (randomBoolean()) { - updateByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false)); - } if (randomBoolean()) { int batchSize = randomInt(100); updateByQueryRequest.setBatchSize(batchSize); @@ -600,9 +553,6 @@ public void testUpdateByQuery() throws IOException { Request request = RequestConverters.updateByQuery(updateByQueryRequest); StringJoiner joiner = new StringJoiner("/", "/", ""); joiner.add(String.join(",", updateByQueryRequest.indices())); - if (updateByQueryRequest.getDocTypes().length > 0) { - joiner.add(String.join(",", updateByQueryRequest.getDocTypes())); - } joiner.add("_update_by_query"); assertEquals(joiner.toString(), request.getEndpoint()); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); @@ -614,9 +564,6 @@ public void testDeleteByQuery() throws IOException { DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(); deleteByQueryRequest.indices(randomIndicesNames(1, 5)); Map expectedParams = new HashMap<>(); - if (randomBoolean()) { - deleteByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false)); - } if (randomBoolean()) { int batchSize = randomInt(100); deleteByQueryRequest.setBatchSize(batchSize); @@ -671,9 +618,6 @@ public void testDeleteByQuery() throws IOException { Request request = RequestConverters.deleteByQuery(deleteByQueryRequest); StringJoiner joiner = new StringJoiner("/", "/", ""); joiner.add(String.join(",", deleteByQueryRequest.indices())); - if (deleteByQueryRequest.getDocTypes().length > 0) { - joiner.add(String.join(",", deleteByQueryRequest.getDocTypes())); - } joiner.add("_delete_by_query"); assertEquals(joiner.toString(), request.getEndpoint()); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); @@ -793,49 +737,6 @@ public void testIndex() throws IOException { } } - public void testIndexWithType() throws IOException { - String index = randomAlphaOfLengthBetween(3, 
10); - String type = randomAlphaOfLengthBetween(3, 10); - IndexRequest indexRequest = new IndexRequest(index, type); - String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null; - indexRequest.id(id); - - String method = HttpPost.METHOD_NAME; - if (id != null) { - method = HttpPut.METHOD_NAME; - if (randomBoolean()) { - indexRequest.opType(DocWriteRequest.OpType.CREATE); - } - } - XContentType xContentType = randomFrom(XContentType.values()); - int nbFields = randomIntBetween(0, 10); - try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { - builder.startObject(); - for (int i = 0; i < nbFields; i++) { - builder.field("field_" + i, i); - } - builder.endObject(); - indexRequest.source(builder); - } - - Request request = RequestConverters.index(indexRequest); - if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) { - assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.getEndpoint()); - } else if (id != null) { - assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint()); - } else { - assertEquals("/" + index + "/" + type, request.getEndpoint()); - } - assertEquals(method, request.getMethod()); - - HttpEntity entity = request.getEntity(); - assertTrue(entity instanceof NByteArrayEntity); - assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue()); - try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) { - assertEquals(nbFields, parser.map().size()); - } - } - public void testUpdate() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); @@ -944,23 +845,6 @@ private static void setRandomIfSeqNoAndTerm(DocWriteRequest request, Map { UpdateRequest updateRequest = new UpdateRequest(); @@ -1055,7 +939,6 @@ public void testBulk() throws IOException { assertEquals(originalRequest.opType(), parsedRequest.opType()); assertEquals(originalRequest.index(), parsedRequest.index()); - 
assertEquals(originalRequest.type(), parsedRequest.type()); assertEquals(originalRequest.id(), parsedRequest.id()); assertEquals(originalRequest.routing(), parsedRequest.routing()); assertEquals(originalRequest.version(), parsedRequest.version()); @@ -1191,10 +1074,6 @@ public void testSearch() throws Exception { if (Strings.hasLength(index)) { endpoint.add(index); } - String type = String.join(",", searchRequest.types()); - if (Strings.hasLength(type)) { - endpoint.add(type); - } endpoint.add(searchEndpoint); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertEquals(endpoint.toString(), request.getEndpoint()); @@ -1204,14 +1083,6 @@ public void testSearch() throws Exception { public static SearchRequest createTestSearchRequest(String[] indices, Map expectedParams) { SearchRequest searchRequest = new SearchRequest(indices); - - int numTypes = randomIntBetween(0, 5); - String[] types = new String[numTypes]; - for (int i = 0; i < numTypes; i++) { - types[i] = "type-" + randomAlphaOfLengthBetween(2, 5); - } - searchRequest.types(types); - setRandomSearchParams(searchRequest, expectedParams); setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams); @@ -1278,7 +1149,6 @@ public static SearchSourceBuilder createTestSearchSourceBuilder() { public void testSearchNullIndicesAndTypes() { expectThrows(NullPointerException.class, () -> new SearchRequest((String[]) null)); expectThrows(NullPointerException.class, () -> new SearchRequest().indices((String[]) null)); - expectThrows(NullPointerException.class, () -> new SearchRequest().types((String[]) null)); } public void testCountNotNullSource() throws IOException { @@ -1293,14 +1163,6 @@ public void testCountNotNullSource() throws IOException { public void testCount() throws Exception { String[] indices = randomIndicesNames(0, 5); CountRequest countRequest = new CountRequest(indices); - - int numTypes = randomIntBetween(0, 5); - String[] types = new String[numTypes]; - 
for (int i = 0; i < numTypes; i++) { - types[i] = "type-" + randomAlphaOfLengthBetween(2, 5); - } - countRequest.types(types); - Map expectedParams = new HashMap<>(); setRandomCountParams(countRequest, expectedParams); setRandomIndicesOptions(countRequest::indicesOptions, countRequest::indicesOptions, expectedParams); @@ -1317,10 +1179,6 @@ public void testCount() throws Exception { if (Strings.hasLength(index)) { endpoint.add(index); } - String type = String.join(",", types); - if (Strings.hasLength(type)) { - endpoint.add(type); - } endpoint.add("_count"); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertEquals(endpoint.toString(), request.getEndpoint()); @@ -1328,12 +1186,6 @@ public void testCount() throws Exception { assertToXContentBody(countRequest, request.getEntity()); } - public void testCountNullIndicesAndTypes() { - expectThrows(NullPointerException.class, () -> new CountRequest((String[]) null)); - expectThrows(NullPointerException.class, () -> new CountRequest().indices((String[]) null)); - expectThrows(NullPointerException.class, () -> new CountRequest().types((String[]) null)); - } - private static void setRandomCountParams(CountRequest countRequest, Map expectedParams) { if (randomBoolean()) { countRequest.routing(randomAlphaOfLengthBetween(3, 10)); @@ -1416,7 +1268,6 @@ public void testMultiSearch() throws IOException { null, null, null, - null, xContentRegistry(), true, deprecationLogger @@ -1602,21 +1453,6 @@ public void testExplain() throws IOException { assertToXContentBody(explainRequest, request.getEntity()); } - public void testExplainWithType() throws IOException { - String index = randomAlphaOfLengthBetween(3, 10); - String type = randomAlphaOfLengthBetween(3, 10); - String id = randomAlphaOfLengthBetween(3, 10); - - ExplainRequest explainRequest = new ExplainRequest(index, type, id); - explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10))); - - Request request = 
RequestConverters.explain(explainRequest); - assertEquals(HttpGet.METHOD_NAME, request.getMethod()); - assertEquals("/" + index + "/" + type + "/" + id + "/_explain", request.getEndpoint()); - - assertToXContentBody(explainRequest, request.getEntity()); - } - public void testTermVectors() throws IOException { String index = randomAlphaOfLengthBetween(3, 10); String id = randomAlphaOfLengthBetween(3, 10); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java index 60c984ad2cc81..19e287fb91be5 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java @@ -1134,7 +1134,6 @@ public void testExplain() throws IOException { ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); assertThat(explainResponse.getIndex(), equalTo("index1")); - assertThat(explainResponse.getType(), equalTo("_doc")); assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); assertTrue(explainResponse.isExists()); assertTrue(explainResponse.isMatch()); @@ -1149,7 +1148,6 @@ public void testExplain() throws IOException { ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); assertThat(explainResponse.getIndex(), equalTo("index1")); - assertThat(explainResponse.getType(), equalTo("_doc")); assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); assertTrue(explainResponse.isExists()); assertTrue(explainResponse.isMatch()); @@ -1164,7 +1162,6 @@ public void testExplain() throws IOException { ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); assertThat(explainResponse.getIndex(), equalTo("index1")); - assertThat(explainResponse.getType(), equalTo("_doc")); 
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); assertTrue(explainResponse.isExists()); assertFalse(explainResponse.isMatch()); @@ -1180,7 +1177,6 @@ public void testExplain() throws IOException { ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); assertThat(explainResponse.getIndex(), equalTo("index1")); - assertThat(explainResponse.getType(), equalTo("_doc")); assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); assertTrue(explainResponse.isExists()); assertFalse(explainResponse.isMatch()); @@ -1212,7 +1208,6 @@ public void testExplainNonExistent() throws IOException { ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); assertThat(explainResponse.getIndex(), equalTo("index1")); - assertThat(explainResponse.getType(), equalTo("_doc")); assertThat(explainResponse.getId(), equalTo("999")); assertFalse(explainResponse.isExists()); assertFalse(explainResponse.isMatch()); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java index 0db8ee4406c8c..d987e786fff76 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/TasksIT.java @@ -117,7 +117,7 @@ public void testGetValidTask() throws Exception { } org.opensearch.tasks.TaskInfo info = taskResponse.getTaskInfo(); assertTrue(info.isCancellable()); - assertEquals("reindex from [source1] to [dest][_doc]", info.getDescription()); + assertEquals("reindex from [source1] to [dest]", info.getDescription()); assertEquals("indices:data/write/reindex", info.getAction()); if (taskResponse.isCompleted() == false) { assertBusy(checkTaskCompletionStatus(client(), taskId)); diff --git 
a/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java index 11a6aeb6dbe47..33b82c10d8873 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/TermVectorsResponseTests.java @@ -59,7 +59,6 @@ public void testFromXContent() throws IOException { static void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException { builder.startObject(); builder.field("_index", response.getIndex()); - builder.field("_type", response.getType()); if (response.getId() != null) { builder.field("_id", response.getId()); } @@ -130,7 +129,6 @@ private static void toXContent(TermVectorsResponse.TermVector tv, XContentBuilde static TermVectorsResponse createTestInstance() { String index = randomAlphaOfLength(5); - String type = randomAlphaOfLength(5); String id = String.valueOf(randomIntBetween(1, 100)); long version = randomNonNegativeLong(); long tookInMillis = randomNonNegativeLong(); @@ -154,7 +152,7 @@ static TermVectorsResponse createTestInstance() { ); } } - TermVectorsResponse tvresponse = new TermVectorsResponse(index, type, id, version, found, tookInMillis, tvList); + TermVectorsResponse tvresponse = new TermVectorsResponse(index, id, version, found, tookInMillis, tvList); return tvresponse; } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java index 67df99d9d7c08..959c5a827f143 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java @@ -1719,9 +1719,8 @@ public void testTermVectors() 
throws Exception { // tag::term-vectors-response String index = response.getIndex(); // <1> - String type = response.getType(); // <2> - String id = response.getId(); // <3> - boolean found = response.getFound(); // <4> + String id = response.getId(); // <2> + boolean found = response.getFound(); // <3> // end::term-vectors-response if (response.getTermVectorsList() != null) { @@ -2051,7 +2050,6 @@ private MultiGetItemResponse unwrapAndAssertExample(MultiGetResponse response) { assertThat(response.getResponses(), arrayWithSize(1)); MultiGetItemResponse item = response.getResponses()[0]; assertEquals("index", item.getIndex()); - assertEquals("_doc", item.getType()); assertEquals("example_id", item.getId()); return item; } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java index 47a116458cb96..9f5c2e51a7960 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/QueryDSLDocumentationTests.java @@ -88,7 +88,6 @@ import static org.opensearch.index.query.QueryBuilders.spanWithinQuery; import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.index.query.QueryBuilders.termsQuery; -import static org.opensearch.index.query.QueryBuilders.typeQuery; import static org.opensearch.index.query.QueryBuilders.wildcardQuery; import static org.opensearch.index.query.QueryBuilders.wrapperQuery; import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction; @@ -447,12 +446,6 @@ public void testTerms() { // end::terms } - public void testType() { - // tag::type - typeQuery("my_type"); // <1> - // end::type - } - public void testWildcard() { // tag::wildcard wildcardQuery( diff --git 
a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java index 2141ce30dce64..37f4d95d5f4d0 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java @@ -33,19 +33,17 @@ package org.opensearch.client.indices; import org.apache.lucene.util.CollectionUtil; +import org.opensearch.client.AbstractResponseTestCase; import org.opensearch.client.GetAliasesResponseTests; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.RandomCreateIndexGenerator; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; import java.util.ArrayList; @@ -57,40 +55,18 @@ import java.util.Map; import java.util.Objects; -import static org.opensearch.test.AbstractXContentTestCase.xContentTester; - -public class GetIndexResponseTests extends OpenSearchTestCase { - - // Because the client-side class does not have a toXContent method, we test xContent serialization by creating - // a random client object, converting it to a server object then serializing it to xContent, and finally - // parsing it back as a client object. We check equality between the original client object, and the parsed one. 
- public void testFromXContent() throws IOException { - xContentTester( - this::createParser, - GetIndexResponseTests::createTestInstance, - GetIndexResponseTests::toXContent, - GetIndexResponse::fromXContent - ).supportsUnknownFields(false) - .assertToXContentEquivalence(false) - .assertEqualsConsumer(GetIndexResponseTests::assertEqualInstances) - .test(); - } - - private static void assertEqualInstances(GetIndexResponse expected, GetIndexResponse actual) { - assertArrayEquals(expected.getIndices(), actual.getIndices()); - assertEquals(expected.getMappings(), actual.getMappings()); - assertEquals(expected.getSettings(), actual.getSettings()); - assertEquals(expected.getDefaultSettings(), actual.getDefaultSettings()); - assertEquals(expected.getAliases(), actual.getAliases()); - } +public class GetIndexResponseTests extends AbstractResponseTestCase< + org.opensearch.action.admin.indices.get.GetIndexResponse, + GetIndexResponse> { - private static GetIndexResponse createTestInstance() { + @Override + protected org.opensearch.action.admin.indices.get.GetIndexResponse createServerTestInstance(XContentType xContentType) { String[] indices = generateRandomStringArray(5, 5, false, false); - Map mappings = new HashMap<>(); - Map> aliases = new HashMap<>(); - Map settings = new HashMap<>(); - Map defaultSettings = new HashMap<>(); - Map dataStreams = new HashMap<>(); + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder dataStreams = ImmutableOpenMap.builder(); IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; boolean includeDefaults = randomBoolean(); for (String index : indices) { @@ -116,17 +92,36 @@ private static GetIndexResponse createTestInstance() { dataStreams.put(index, 
randomAlphaOfLength(5).toLowerCase(Locale.ROOT)); } } - return new GetIndexResponse(indices, mappings, aliases, settings, defaultSettings, dataStreams); + return new org.opensearch.action.admin.indices.get.GetIndexResponse( + indices, + mappings.build(), + aliases.build(), + settings.build(), + defaultSettings.build(), + dataStreams.build() + ); + } + + @Override + protected GetIndexResponse doParseToClientInstance(XContentParser parser) throws IOException { + return GetIndexResponse.fromXContent(parser); + } + + @Override + protected void assertInstances( + org.opensearch.action.admin.indices.get.GetIndexResponse serverTestInstance, + GetIndexResponse clientInstance + ) { + assertArrayEquals(serverTestInstance.getIndices(), clientInstance.getIndices()); + assertMapEquals(serverTestInstance.getMappings(), clientInstance.getMappings()); + assertMapEquals(serverTestInstance.getSettings(), clientInstance.getSettings()); + assertMapEquals(serverTestInstance.defaultSettings(), clientInstance.getDefaultSettings()); + assertMapEquals(serverTestInstance.getAliases(), clientInstance.getAliases()); } private static MappingMetadata createMappingsForIndex() { int typeCount = rarely() ? 
0 : 1; - MappingMetadata mmd; - try { - mmd = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); - } catch (IOException e) { - throw new RuntimeException(e); - } + MappingMetadata mmd = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); for (int i = 0; i < typeCount; i++) { if (rarely() == false) { // rarely have no fields Map mappings = new HashMap<>(); @@ -135,12 +130,8 @@ private static MappingMetadata createMappingsForIndex() { mappings.put("field2-" + i, randomFieldMapping()); } - try { - String typeName = MapperService.SINGLE_MAPPING_NAME; - mmd = new MappingMetadata(typeName, mappings); - } catch (IOException e) { - fail("shouldn't have failed " + e); - } + String typeName = MapperService.SINGLE_MAPPING_NAME; + mmd = new MappingMetadata(typeName, mappings); } } return mmd; @@ -178,39 +169,4 @@ private static Map randomFieldMapping() { } return mappings; } - - private static void toXContent(GetIndexResponse response, XContentBuilder builder) throws IOException { - // first we need to repackage from GetIndexResponse to org.opensearch.action.admin.indices.get.GetIndexResponse - ImmutableOpenMap.Builder> allMappings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); - - Map indexMappings = response.getMappings(); - for (String index : response.getIndices()) { - MappingMetadata mmd = indexMappings.get(index); - ImmutableOpenMap.Builder typedMappings = ImmutableOpenMap.builder(); - if (mmd != null) { - typedMappings.put(MapperService.SINGLE_MAPPING_NAME, mmd); - } - allMappings.put(index, typedMappings.build()); - aliases.put(index, response.getAliases().get(index)); - settings.put(index, response.getSettings().get(index)); - defaultSettings.put(index, response.getDefaultSettings().get(index)); - } - - 
org.opensearch.action.admin.indices.get.GetIndexResponse serverResponse = - new org.opensearch.action.admin.indices.get.GetIndexResponse( - response.getIndices(), - allMappings.build(), - aliases.build(), - settings.build(), - defaultSettings.build(), - ImmutableOpenMap.builder().build() - ); - - // then we can call its toXContent method, forcing no output of types - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); - serverResponse.toXContent(builder, params); - } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java index 4049fcb41df99..ead5fd4087c0b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java @@ -50,7 +50,6 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -84,7 +83,7 @@ public void testFromXContent() throws IOException { .test(); } - public void testParsingFromEsResponse() throws IOException { + public void testParsingFromOpenSearchResponse() throws IOException { for (int runs = 0; runs < 20; runs++) { org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse esResponse = new org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse(new ArrayList<>()); @@ -132,8 +131,7 @@ public void testParsingFromEsResponse() throws IOException { assertThat(result.order(), equalTo(esIMD.order())); assertThat(result.version(), equalTo(esIMD.version())); - assertThat(esIMD.mappings().size(), equalTo(1)); - BytesReference mappingSource = 
esIMD.mappings().valuesIt().next().uncompressed(); + BytesReference mappingSource = esIMD.mappings().uncompressed(); Map expectedMapping = XContentHelper.convertToMap(mappingSource, true, xContentBuilder.contentType()) .v2(); assertThat(result.mappings().sourceAsMap(), equalTo(expectedMapping.get("_doc"))); @@ -196,13 +194,9 @@ static GetIndexTemplatesResponse createTestInstance() { templateBuilder.version(between(0, 100)); } if (randomBoolean()) { - try { - Map map = XContentHelper.convertToMap(new BytesArray(mappingString), true, XContentType.JSON).v2(); - MappingMetadata mapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, map); - templateBuilder.mapping(mapping); - } catch (IOException ex) { - throw new UncheckedIOException(ex); - } + Map map = XContentHelper.convertToMap(new BytesArray(mappingString), true, XContentType.JSON).v2(); + MappingMetadata mapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, map); + templateBuilder.mapping(mapping); } templates.add(templateBuilder.build()); } @@ -229,7 +223,10 @@ static void toXContent(GetIndexTemplatesResponse response, XContentBuilder build serverTemplateBuilder.order(clientITMD.order()); serverTemplateBuilder.version(clientITMD.version()); if (clientITMD.mappings() != null) { - serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, clientITMD.mappings().source()); + // The client-side mappings never include a wrapping type, but server-side mappings + // for index templates still do so we need to wrap things here + String mappings = "{\"" + MapperService.SINGLE_MAPPING_NAME + "\": " + clientITMD.mappings().source().string() + "}"; + serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, mappings); } serverIndexTemplates.add(serverTemplateBuilder.build()); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java index 
817bce359b7d7..cb62b116de020 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetMappingsResponseTests.java @@ -32,70 +32,54 @@ package org.opensearch.client.indices; +import org.opensearch.client.AbstractResponseTestCase; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.function.Predicate; -import static org.opensearch.client.indices.GetMappingsResponse.MAPPINGS; -import static org.opensearch.test.AbstractXContentTestCase.xContentTester; +public class GetMappingsResponseTests extends AbstractResponseTestCase< + org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse, + GetMappingsResponse> { -public class GetMappingsResponseTests extends OpenSearchTestCase { - - // Because the client-side class does not have a toXContent method, we test xContent serialization by creating - // a random client object, converting it to a server object then serializing it to xContent, and finally - // parsing it back as a client object. We check equality between the original client object, and the parsed one. 
- public void testFromXContent() throws IOException { - xContentTester( - this::createParser, - GetMappingsResponseTests::createTestInstance, - GetMappingsResponseTests::toXContent, - GetMappingsResponse::fromXContent - ).supportsUnknownFields(true) - .assertEqualsConsumer(GetMappingsResponseTests::assertEqualInstances) - .randomFieldsExcludeFilter(randomFieldsExcludeFilter()) - .test(); - } - - private static GetMappingsResponse createTestInstance() { - Map mappings = Collections.singletonMap("index-" + randomAlphaOfLength(5), randomMappingMetadata()); - return new GetMappingsResponse(mappings); + @Override + protected org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse createServerTestInstance(XContentType xContentType) { + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); + int numberOfIndexes = randomIntBetween(1, 5); + for (int i = 0; i < numberOfIndexes; i++) { + mappings.put("index-" + randomAlphaOfLength(5), randomMappingMetadata()); + } + return new org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse(mappings.build()); } - private static void assertEqualInstances(GetMappingsResponse expected, GetMappingsResponse actual) { - assertEquals(expected.mappings(), actual.mappings()); + @Override + protected GetMappingsResponse doParseToClientInstance(XContentParser parser) throws IOException { + return GetMappingsResponse.fromXContent(parser); } - private Predicate randomFieldsExcludeFilter() { - return field -> !field.equals(MAPPINGS.getPreferredName()); + @Override + protected void assertInstances( + org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse serverTestInstance, + GetMappingsResponse clientInstance + ) { + assertMapEquals(serverTestInstance.getMappings(), clientInstance.mappings()); } public static MappingMetadata randomMappingMetadata() { Map mappings = new HashMap<>(); - if (frequently()) { // rarely have no fields mappings.put("field1", randomFieldMapping()); if (randomBoolean()) { 
mappings.put("field2", randomFieldMapping()); } } - - try { - return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); - } catch (IOException e) { - throw new RuntimeException(e); - } + return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); } private static Map randomFieldMapping() { @@ -110,22 +94,4 @@ private static Map randomFieldMapping() { } return mappings; } - - private static void toXContent(GetMappingsResponse response, XContentBuilder builder) throws IOException { - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); - ImmutableOpenMap.Builder> allMappings = ImmutableOpenMap.builder(); - - for (Map.Entry indexEntry : response.mappings().entrySet()) { - ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); - mappings.put(MapperService.SINGLE_MAPPING_NAME, indexEntry.getValue()); - allMappings.put(indexEntry.getKey(), mappings.build()); - } - - org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse serverResponse = - new org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse(allMappings.build()); - - builder.startObject(); - serverResponse.toXContent(builder, params); - builder.endObject(); - } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java index 7577aa66bfcde..0c924bc06046c 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java @@ -38,11 +38,8 @@ import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.ToXContent; import 
org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.common.xcontent.ToXContent.Params; import java.io.IOException; import java.util.ArrayList; @@ -51,7 +48,6 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.Collections; import static org.opensearch.test.AbstractXContentTestCase.xContentTester; @@ -94,7 +90,6 @@ private Predicate getRandomFieldsExcludeFilter() { } private static void toXContent(RolloverResponse response, XContentBuilder builder) throws IOException { - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); org.opensearch.action.admin.indices.rollover.RolloverResponse serverResponse = new org.opensearch.action.admin.indices.rollover.RolloverResponse( response.getOldIndex(), @@ -105,6 +100,6 @@ private static void toXContent(RolloverResponse response, XContentBuilder builde response.isAcknowledged(), response.isShardsAcknowledged() ); - serverResponse.toXContent(builder, params); + serverResponse.toXContent(builder, null); } } diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 2271fed252793..5c1252061443a 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -33,8 +33,8 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 'org.opensearch.client' archivesBaseName = 'opensearch-rest-client' diff --git a/client/rest/src/main/java/org/opensearch/client/NodeSelector.java b/client/rest/src/main/java/org/opensearch/client/NodeSelector.java index 398a3a72b9414..09d5a2c1fe576 100644 --- 
a/client/rest/src/main/java/org/opensearch/client/NodeSelector.java +++ b/client/rest/src/main/java/org/opensearch/client/NodeSelector.java @@ -48,7 +48,7 @@ public interface NodeSelector { * iterate the nodes as many times as they need. *

* This may be called twice per request: first for "living" nodes that - * have not been blacklisted by previous errors. If the selector removes + * have not been denylisted by previous errors. If the selector removes * all nodes from the list or if there aren't any living nodes then the * {@link RestClient} will call this method with a list of "dead" nodes. *

diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java index c004613f89b7f..4f899fd709112 100644 --- a/client/rest/src/main/java/org/opensearch/client/RestClient.java +++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java @@ -125,7 +125,7 @@ public class RestClient implements Closeable { final List

defaultHeaders; private final String pathPrefix; private final AtomicInteger lastNodeIndex = new AtomicInteger(0); - private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); + private final ConcurrentMap denylist = new ConcurrentHashMap<>(); private final FailureListener failureListener; private final NodeSelector nodeSelector; private volatile NodeTuple> nodeTuple; @@ -246,7 +246,7 @@ public synchronized void setNodes(Collection nodes) { authCache.put(node.getHost(), new BasicScheme()); } this.nodeTuple = new NodeTuple<>(Collections.unmodifiableList(new ArrayList<>(nodesByHost.values())), authCache); - this.blacklist.clear(); + this.denylist.clear(); } /** @@ -448,7 +448,7 @@ public void cancelled() { */ private NodeTuple> nextNodes() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; - Iterable hosts = selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector); + Iterable hosts = selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); } @@ -458,17 +458,17 @@ private NodeTuple> nextNodes() throws IOException { */ static Iterable selectNodes( NodeTuple> nodeTuple, - Map blacklist, + Map denylist, AtomicInteger lastNodeIndex, NodeSelector nodeSelector ) throws IOException { /* * Sort the nodes into living and dead lists. */ - List livingNodes = new ArrayList<>(Math.max(0, nodeTuple.nodes.size() - blacklist.size())); - List deadNodes = new ArrayList<>(blacklist.size()); + List livingNodes = new ArrayList<>(Math.max(0, nodeTuple.nodes.size() - denylist.size())); + List deadNodes = new ArrayList<>(denylist.size()); for (Node node : nodeTuple.nodes) { - DeadHostState deadness = blacklist.get(node.getHost()); + DeadHostState deadness = denylist.get(node.getHost()); if (deadness == null || deadness.shallBeRetried()) { livingNodes.add(node); } else { @@ -526,9 +526,9 @@ static Iterable selectNodes( * Receives as an argument the host that was used for the successful request. 
*/ private void onResponse(Node node) { - DeadHostState removedHost = this.blacklist.remove(node.getHost()); + DeadHostState removedHost = this.denylist.remove(node.getHost()); if (logger.isDebugEnabled() && removedHost != null) { - logger.debug("removed [" + node + "] from blacklist"); + logger.debug("removed [" + node + "] from denylist"); } } @@ -538,19 +538,19 @@ private void onResponse(Node node) { */ private void onFailure(Node node) { while (true) { - DeadHostState previousDeadHostState = blacklist.putIfAbsent( + DeadHostState previousDeadHostState = denylist.putIfAbsent( node.getHost(), new DeadHostState(DeadHostState.DEFAULT_TIME_SUPPLIER) ); if (previousDeadHostState == null) { if (logger.isDebugEnabled()) { - logger.debug("added [" + node + "] to blacklist"); + logger.debug("added [" + node + "] to denylist"); } break; } - if (blacklist.replace(node.getHost(), previousDeadHostState, new DeadHostState(previousDeadHostState))) { + if (denylist.replace(node.getHost(), previousDeadHostState, new DeadHostState(previousDeadHostState))) { if (logger.isDebugEnabled()) { - logger.debug("updated [" + node + "] already in blacklist"); + logger.debug("updated [" + node + "] already in denylist"); } break; } @@ -718,8 +718,8 @@ static class NodeTuple { } /** - * Contains a reference to a blacklisted node and the time until it is - * revived. We use this so we can do a single pass over the blacklist. + * Contains a reference to a denylisted node and the time until it is + * revived. We use this so we can do a single pass over the denylist. 
*/ private static class DeadNode implements Comparable { final Node node; diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java index 0011622fe24b0..0b7d2881ccb54 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java @@ -62,7 +62,7 @@ import static org.junit.Assert.fail; /** - * Tests for {@link RestClient} behaviour against multiple hosts: fail-over, blacklisting etc. + * Tests for {@link RestClient} behaviour against multiple hosts: fail-over, denylisting etc. * Relies on a mock http client to intercept requests and return desired responses based on request path. */ public class RestClientMultipleHostsTests extends RestClientTestCase { @@ -154,7 +154,7 @@ public void testRoundRobinRetryErrors() throws Exception { fail("request should have failed"); } catch (ResponseException e) { Set hostsSet = hostsSet(); - // first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each + // first request causes all the hosts to be denylisted, the returned exception holds one suppressed exception each failureListener.assertCalled(nodes); do { Response response = e.getResponse(); @@ -175,7 +175,7 @@ public void testRoundRobinRetryErrors() throws Exception { assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size()); } catch (IOException e) { Set hostsSet = hostsSet(); - // first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each + // first request causes all the hosts to be denylisted, the returned exception holds one suppressed exception each failureListener.assertCalled(nodes); do { HttpHost httpHost = HttpHost.create(e.getMessage()); @@ -211,13 +211,13 @@ public void 
testRoundRobinRetryErrors() throws Exception { "host [" + response.getHost() + "] not found, most likely used multiple times", hostsSet.remove(response.getHost()) ); - // after the first request, all hosts are blacklisted, a single one gets resurrected each time + // after the first request, all hosts are denylisted, a single one gets resurrected each time failureListener.assertCalled(response.getHost()); assertEquals(0, e.getSuppressed().length); } catch (IOException e) { HttpHost httpHost = HttpHost.create(e.getMessage()); assertTrue("host [" + httpHost + "] not found, most likely used multiple times", hostsSet.remove(httpHost)); - // after the first request, all hosts are blacklisted, a single one gets resurrected each time + // after the first request, all hosts are denylisted, a single one gets resurrected each time failureListener.assertCalled(httpHost); assertEquals(0, e.getSuppressed().length); } diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java index 169e2dbcfd8c5..ca761dcb6b9b6 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java @@ -260,11 +260,11 @@ public String toString() { NodeTuple> nodeTuple = new NodeTuple<>(Arrays.asList(n1, n2, n3), null); - Map emptyBlacklist = Collections.emptyMap(); + Map emptyDenylist = Collections.emptyMap(); // Normal cases where the node selector doesn't reject all living nodes - assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, emptyBlacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, emptyBlacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, emptyDenylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, emptyDenylist, not1); /* * Try a NodeSelector that excludes all nodes. 
This should @@ -274,83 +274,83 @@ public String toString() { String message = "NodeSelector [NONE] rejected all nodes, living [" + "[host=http://1, version=1], [host=http://2, version=2], " + "[host=http://3, version=3]] and dead []"; - assertEquals(message, assertSelectAllRejected(nodeTuple, emptyBlacklist, noNodes)); + assertEquals(message, assertSelectAllRejected(nodeTuple, emptyDenylist, noNodes)); } // Mark all the nodes dead for a few test cases { final AtomicLong time = new AtomicLong(0L); Supplier timeSupplier = time::get; - Map blacklist = new HashMap<>(); - blacklist.put(n1.getHost(), new DeadHostState(timeSupplier)); - blacklist.put(n2.getHost(), new DeadHostState(new DeadHostState(timeSupplier))); - blacklist.put(n3.getHost(), new DeadHostState(new DeadHostState(new DeadHostState(timeSupplier)))); + Map denylist = new HashMap<>(); + denylist.put(n1.getHost(), new DeadHostState(timeSupplier)); + denylist.put(n2.getHost(), new DeadHostState(new DeadHostState(timeSupplier))); + denylist.put(n3.getHost(), new DeadHostState(new DeadHostState(new DeadHostState(timeSupplier)))); /* - * case when fewer nodeTuple than blacklist, won't result in any IllegalCapacityException + * case when fewer nodeTuple than denylist, won't result in any IllegalCapacityException */ { NodeTuple> fewerNodeTuple = new NodeTuple<>(Arrays.asList(n1, n2), null); - assertSelectLivingHosts(Arrays.asList(n1), fewerNodeTuple, blacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2), fewerNodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1), fewerNodeTuple, denylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2), fewerNodeTuple, denylist, not1); } /* * selectHosts will revive a single host regardless of - * blacklist time. It'll revive the node that is closest + * denylist time. It'll revive the node that is closest * to being revived that the NodeSelector is ok with. 
*/ - assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY)); - assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), not1)); + assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(), NodeSelector.ANY)); + assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(), not1)); /* * Try a NodeSelector that excludes all nodes. This should * return a failure, but a different failure than when the - * blacklist is empty so that the caller knows that all of - * their nodes are blacklisted AND blocked. + * denylist is empty so that the caller knows that all of + * their nodes are denylisted AND blocked. */ String message = "NodeSelector [NONE] rejected all nodes, living [] and dead [" + "[host=http://1, version=1], [host=http://2, version=2], " + "[host=http://3, version=3]]"; - assertEquals(message, assertSelectAllRejected(nodeTuple, blacklist, noNodes)); + assertEquals(message, assertSelectAllRejected(nodeTuple, denylist, noNodes)); /* * Now lets wind the clock forward, past the timeout for one of * the dead nodes. We should return it. */ time.set(new DeadHostState(timeSupplier).getDeadUntilNanos()); - assertSelectLivingHosts(Arrays.asList(n1), nodeTuple, blacklist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n1), nodeTuple, denylist, NodeSelector.ANY); /* * But if the NodeSelector rejects that node then we'll pick the * first on that the NodeSelector doesn't reject. */ - assertSelectLivingHosts(Arrays.asList(n2), nodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n2), nodeTuple, denylist, not1); /* * If we wind the clock way into the future, past any of the - * blacklist timeouts then we function as though the nodes aren't - * in the blacklist at all. + * denylist timeouts then we function as though the nodes aren't + * in the denylist at all. 
*/ time.addAndGet(DeadHostState.MAX_CONNECTION_TIMEOUT_NANOS); - assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, blacklist, NodeSelector.ANY); - assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, blacklist, not1); + assertSelectLivingHosts(Arrays.asList(n1, n2, n3), nodeTuple, denylist, NodeSelector.ANY); + assertSelectLivingHosts(Arrays.asList(n2, n3), nodeTuple, denylist, not1); } } private void assertSelectLivingHosts( List expectedNodes, NodeTuple> nodeTuple, - Map blacklist, + Map denylist, NodeSelector nodeSelector ) throws IOException { int iterations = 1000; AtomicInteger lastNodeIndex = new AtomicInteger(0); - assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector)); // Calling it again rotates the set of results for (int i = 1; i < iterations; i++) { Collections.rotate(expectedNodes, 1); - assertEquals("iteration " + i, expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + assertEquals("iteration " + i, expectedNodes, RestClient.selectNodes(nodeTuple, denylist, lastNodeIndex, nodeSelector)); } } @@ -360,11 +360,11 @@ private void assertSelectLivingHosts( */ private static String assertSelectAllRejected( NodeTuple> nodeTuple, - Map blacklist, + Map denylist, NodeSelector nodeSelector ) { try { - RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector); + RestClient.selectNodes(nodeTuple, denylist, new AtomicInteger(0), nodeSelector); throw new AssertionError("expected selectHosts to fail"); } catch (IOException e) { return e.getMessage(); diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index f81f4ccc3b1e8..bc4be1dd153e8 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -30,8 +30,8 @@ apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = 
JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 'org.opensearch.client' archivesBaseName = 'opensearch-rest-client-sniffer' diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java index 2a6362c611329..adddb3bda725c 100644 --- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java +++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java @@ -160,9 +160,8 @@ public void run() { // tasks are run by a single threaded executor, so swapping is safe with a simple volatile variable ScheduledTask previousTask = nextScheduledTask; nextScheduledTask = new ScheduledTask(task, future); - assert initialized.get() == false - || previousTask.task.isSkipped() - || previousTask.task.hasStarted() : "task that we are replacing is neither " + "cancelled nor has it ever started"; + assert initialized.get() == false || previousTask.task.isSkipped() || previousTask.task.hasStarted() + : "task that we are replacing is neither " + "cancelled nor has it ever started"; } } diff --git a/client/test/build.gradle b/client/test/build.gradle index 7d1333a84eae7..07d874cf01ea7 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -29,8 +29,8 @@ */ apply plugin: 'opensearch.build' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = "${group}.client.test" diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index c9be5c632cb59..c980217b0b8dc 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -64,7 +64,7 @@ FROM ${base_image} ENV OPENSEARCH_CONTAINER true RUN sed -i 
's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-Linux-* && \\ - sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-* && \\ + sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.epel.cloud|g' /etc/yum.repos.d/CentOS-Linux-* && \\ for iter in {1..10}; do \\ ${package_manager} update --setopt=tsflags=nodocs -y && \\ ${package_manager} install --setopt=tsflags=nodocs -y \\ diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index e5c75af5188e5..93a82ff324835 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -63,7 +63,7 @@ import java.util.regex.Pattern */ plugins { - id "nebula.ospackage-base" version "9.0.0" + id "nebula.ospackage-base" version "9.1.1" } void addProcessFilesTask(String type, boolean jdk) { diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle index 1990c2b35c151..9480a86ce6fb7 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/java-version-checker/build.gradle @@ -11,7 +11,9 @@ apply plugin: 'opensearch.build' -targetCompatibility = JavaVersion.VERSION_1_7 +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 + // targetting very old java versions enables a warning by default on newer JDK: disable it. 
compileJava.options.compilerArgs += '-Xlint:-options' diff --git a/distribution/tools/keystore-cli/build.gradle b/distribution/tools/keystore-cli/build.gradle index 670c898019d28..1e7473f787ca0 100644 --- a/distribution/tools/keystore-cli/build.gradle +++ b/distribution/tools/keystore-cli/build.gradle @@ -34,6 +34,6 @@ dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") testImplementation project(":test:framework") - testImplementation 'com.google.jimfs:jimfs:1.1' - testRuntimeOnly 'com.google.guava:guava:30.1.1-jre' + testImplementation 'com.google.jimfs:jimfs:1.2' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index d96fced1ec293..b2e81491da6bd 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -36,10 +36,10 @@ dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") api "org.bouncycastle:bcpg-fips:1.0.5.1" - api "org.bouncycastle:bc-fips:1.0.2.1" + api "org.bouncycastle:bc-fips:1.0.2.3" testImplementation project(":test:framework") - testImplementation 'com.google.jimfs:jimfs:1.1' - testRuntimeOnly 'com.google.guava:guava:30.1.1-jre' + testImplementation 'com.google.jimfs:jimfs:1.2' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 deleted file mode 100644 index 3c2bd02f432fe..0000000000000 --- a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3110169183fc532d00f0930f2b5901672515eb7c \ No newline at end of file diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 new file mode 100644 index 
0000000000000..c71320050b7de --- /dev/null +++ b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.3.jar.sha1 @@ -0,0 +1 @@ +da62b32cb72591f5b4d322e6ab0ce7de3247b534 \ No newline at end of file diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java index d6f619784c536..8acf137043a92 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java @@ -218,11 +218,23 @@ class InstallPluginCommand extends EnvironmentAwareCommand { Arrays.asList("b", "batch"), "Enable batch mode explicitly, automatic confirmation of security permission" ); - this.arguments = parser.nonOptions("plugin id"); + this.arguments = parser.nonOptions("plugin "); } @Override protected void printAdditionalHelp(Terminal terminal) { + terminal.println("Plugins are packaged as zip files. 
Each packaged plugin must contain a plugin properties file."); + terminal.println(""); + + // List possible plugin id inputs + terminal.println("The install command takes a plugin id, which may be any of the following:"); + terminal.println(" An official opensearch plugin name"); + terminal.println(" Maven coordinates to a plugin zip"); + terminal.println(" A URL to a plugin zip"); + terminal.println(" A local zip file"); + terminal.println(""); + + // List official opensearch plugin names terminal.println("The following official plugins may be installed by name:"); for (String plugin : OFFICIAL_PLUGINS) { terminal.println(" " + plugin); diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java index a57050540a216..e0e5cbc54276e 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java @@ -828,6 +828,31 @@ protected boolean addShutdownHook() { } } + public void testPluginsHelpNonOptionArgumentsOutput() throws Exception { + MockTerminal terminal = new MockTerminal(); + new InstallPluginCommand() { + @Override + protected boolean addShutdownHook() { + return false; + } + }.main(new String[] { "--help" }, terminal); + try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) { + + // grab first line of --help output + String line = reader.readLine(); + + // find the beginning of Non-option arguments list + while (line.contains("Non-option arguments:") == false) { + line = reader.readLine(); + } + + // check that non option agrument list contains correct string + line = reader.readLine(); + assertThat(line, containsString("")); + + } + } + public void testInstallMisspelledOfficialPlugins() throws Exception { Tuple env = createEnv(fs, 
temp); diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle index 5018a4bb8702e..0e1996f3d68fa 100644 --- a/distribution/tools/upgrade-cli/build.gradle +++ b/distribution/tools/upgrade-cli/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:30.1.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 9abaf35e589be..7f1e9cb8d04b3 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -32,7 +32,7 @@ apply plugin: 'nebula.optional-base' apply plugin: 'opensearch.publish' dependencies { - api 'net.sf.jopt-simple:jopt-simple:5.0.2' + api 'net.sf.jopt-simple:jopt-simple:5.0.4' api project(':libs:opensearch-core') } diff --git a/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1 b/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1 deleted file mode 100644 index b50ed4fea3bd1..0000000000000 --- a/libs/cli/licenses/jopt-simple-5.0.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -98cafc6081d5632b61be2c9e60650b64ddbc637c \ No newline at end of file diff --git a/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1 b/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1 new file mode 100644 index 0000000000000..7ade81efe4d0d --- /dev/null +++ b/libs/cli/licenses/jopt-simple-5.0.4.jar.sha1 @@ -0,0 +1 @@ +4fdac2fbe92dfad86aa6e9301736f6b4342a3f5c \ No newline at end of file diff --git a/libs/core/build.gradle b/libs/core/build.gradle index edb05cd1c22b0..374f2fe572a12 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -54,13 +54,13 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = 
JavaVersion.VERSION_11 } forbiddenApisJava11 { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java index 47cf49b3e320f..f784ef9d16464 100644 --- a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java +++ b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java @@ -31,13 +31,13 @@ package org.opensearch.common.util.concurrent; +import org.opensearch.common.concurrent.OneWayGate; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.Matchers; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -138,7 +138,7 @@ public void run() { private final class MyRefCounted extends AbstractRefCounted { - private final AtomicBoolean closed = new AtomicBoolean(false); + private final OneWayGate gate = new OneWayGate(); MyRefCounted() { super("test"); @@ -146,11 +146,11 @@ private final class MyRefCounted extends AbstractRefCounted { @Override protected void closeInternal() { - this.closed.set(true); + gate.close(); } public void ensureOpen() { - if (closed.get()) { + if (gate.isClosed()) { assert this.refCount() == 0; throw new IllegalStateException("closed"); } diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index b324bba381a26..ce23406721fe6 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -29,7 +29,7 @@ */ dependencies { - api 'org.jruby.joni:joni:2.1.29' + api 'org.jruby.joni:joni:2.1.41' // joni dependencies: api 
'org.jruby.jcodings:jcodings:1.0.44' @@ -41,3 +41,7 @@ dependencies { tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } + +thirdPartyAudit.ignoreMissingClasses( + 'org.jcodings.unicode.UnicodeCodeRange' +) \ No newline at end of file diff --git a/libs/grok/licenses/joni-2.1.29.jar.sha1 b/libs/grok/licenses/joni-2.1.29.jar.sha1 deleted file mode 100644 index 251ff2ec05a19..0000000000000 --- a/libs/grok/licenses/joni-2.1.29.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3cb751702e1194ff24259155db4d37e9383d4320 \ No newline at end of file diff --git a/libs/grok/licenses/joni-2.1.41.jar.sha1 b/libs/grok/licenses/joni-2.1.41.jar.sha1 new file mode 100644 index 0000000000000..4f0a0a8393dd0 --- /dev/null +++ b/libs/grok/licenses/joni-2.1.41.jar.sha1 @@ -0,0 +1 @@ +4a35f4eaef792073bc081b756b1f4949879cd41e \ No newline at end of file diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java index 08f1efd4dac45..f41c49844997d 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java @@ -54,7 +54,7 @@ * a thread must have {@code modifyThread} to even terminate its own pool, leaving * system threads unprotected. * - * This class throws exception on {@code exitVM} calls, and provides a whitelist where calls + * This class throws exception on {@code exitVM} calls, and provides an allowlist where calls * from exit are allowed. *

* Additionally it enforces threadgroup security with the following rules: diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java index c4d775e040dff..02c3bdfd70ec2 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java @@ -76,7 +76,7 @@ public void testCustomWordDelimiterQueryString() { .addMapping("type1", "field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "foo bar baz", "field2", "not needed").get(); + client().prepareIndex("test").setId("1").setSource("field1", "foo bar baz", "field2", "not needed").get(); refresh(); SearchResponse response = client().prepareSearch("test") diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java index c479a6d01eea5..1c13e51788f26 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/AnalysisPainlessExtension.java @@ -43,13 +43,13 @@ public class AnalysisPainlessExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + private static final Whitelist ALLOWLIST = WhitelistLoader.loadFromResourceFiles( AnalysisPainlessExtension.class, "painless_whitelist.txt" ); @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(AnalysisPredicateScript.CONTEXT, 
Collections.singletonList(WHITELIST)); + return Collections.singletonMap(AnalysisPredicateScript.CONTEXT, Collections.singletonList(ALLOWLIST)); } } diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java index 98956a62edba5..47a144311c0a7 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java @@ -257,6 +257,7 @@ public Map> getTokenFilters() { filters.put("classic", ClassicFilterFactory::new); filters.put("czech_stem", CzechStemTokenFilterFactory::new); filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new)); + filters.put("concatenate_graph", ConcatenateGraphTokenFilterFactory::new); filters.put( "condition", requiresAnalysisSettings((i, e, n, s) -> new ScriptedConditionTokenFilterFactory(i, n, s, scriptService.get())) diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java new file mode 100644 index 0000000000000..0d1a2b185d1d3 --- /dev/null +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java @@ -0,0 +1,81 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.analysis.common; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter; +import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import org.opensearch.LegacyESVersion; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.analysis.AbstractTokenFilterFactory; + +/** + * Factory for {@link ConcatenateGraphFilter}. + * Adopted from {@link org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilterFactory}, with some changes to + * default values: token_separator is a "space", preserve_position_increments is false to avoid duplicated separators, + * max_graph_expansions is 100 as the default value of 10_000 seems to be unnecessarily large and preserve_separator is false. + * + *

    + *
  • preserve_separator: + * For LegacyESVersion lesser than {@link LegacyESVersion#V_7_6_0} i.e. lucene versions lesser + * than {@link org.apache.lucene.util.Version#LUCENE_8_4_0} + * Whether {@link ConcatenateGraphFilter#SEP_LABEL} should separate the input tokens in the concatenated token. + *
  • + *
  • token_separator: + * Separator to use for concatenation. Must be a String with a single character or empty. + * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_TOKEN_SEPARATOR} will be used. + * If empty i.e. "", tokens will be concatenated without any separators. + *
  • + *
  • preserve_position_increments: + * Whether to add an empty token for missing positions. + * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_PRESERVE_POSITION_INCREMENTS} will be used. + *
  • + *
  • max_graph_expansions: + * If the tokenStream graph has more than this many possible paths through, then we'll throw + * {@link TooComplexToDeterminizeException} to preserve the stability and memory of the + * machine. + * If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_MAX_GRAPH_EXPANSIONS} will be used. + *
  • + *
+ * @see ConcatenateGraphFilter + */ +public class ConcatenateGraphTokenFilterFactory extends AbstractTokenFilterFactory { + public static final String DEFAULT_TOKEN_SEPARATOR = " "; + public static final int DEFAULT_MAX_GRAPH_EXPANSIONS = 100; + public static final boolean DEFAULT_PRESERVE_POSITION_INCREMENTS = false; + + private final Character tokenSeparator; + private final int maxGraphExpansions; + private final boolean preservePositionIncrements; + + ConcatenateGraphTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + super(indexSettings, name, settings); + + if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { // i.e. Lucene 8.4.0 + String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR); + if (separator.length() > 1) { + throw new IllegalArgumentException("token_separator must be either empty or a single character"); + } + tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating + } else { + boolean preserveSep = settings.getAsBoolean("preserve_separator", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP); + tokenSeparator = preserveSep ? 
ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR : null; + } + + maxGraphExpansions = settings.getAsInt("max_graph_expansions", DEFAULT_MAX_GRAPH_EXPANSIONS); + preservePositionIncrements = settings.getAsBoolean("preserve_position_increments", DEFAULT_PRESERVE_POSITION_INCREMENTS); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return new ConcatenateGraphFilter(tokenStream, tokenSeparator, preservePositionIncrements, maxGraphExpansions); + } +} diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java new file mode 100644 index 0000000000000..ef4146b65872d --- /dev/null +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java @@ -0,0 +1,260 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.analysis.common; + +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.Token; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter; +import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import org.opensearch.LegacyESVersion; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.index.analysis.AnalysisTestsHelper; +import org.opensearch.index.analysis.NamedAnalyzer; +import org.opensearch.index.analysis.TokenFilterFactory; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.OpenSearchTokenStreamTestCase; +import org.opensearch.test.VersionUtils; + +import java.io.IOException; +import java.io.StringReader; + +public class ConcatenateGraphTokenFilterFactoryTests extends OpenSearchTokenStreamTestCase { + public void testSimpleTokenizerAndConcatenate() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("concatenate_graph"); + String source = "PowerShot Is AweSome"; + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader(source)); + + assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShot Is AweSome" }); + } + + public void testTokenizerCustomizedSeparator() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + String source = "PowerShot Is AweSome"; + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader(source)); + + assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShot+Is+AweSome" }); + } + + public void testOldLuceneVersionSeparator() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2) + ) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") // this will be ignored + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + String source = "PowerShot Is AweSome"; + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader(source)); + + // earlier Lucene version will only use Lucene's default separator + assertTokenStreamContents( + tokenFilter.create(tokenizer), + new String[] { + "PowerShot" + + ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR + + "Is" + + ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR + + "AweSome" } + ); + } + + public void testOldLuceneVersionNoSeparator() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put( + IndexMetadata.SETTING_VERSION_CREATED, + 
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2) + ) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") // this will be ignored + .put("index.analysis.filter.my_concatenate_graph.preserve_separator", "false") + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + String source = "PowerShot Is AweSome"; + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader(source)); + + // earlier Lucene version will not add separator if preserve_separator is false + assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShotIsAweSome" }); + } + + public void testTokenizerEmptySeparator() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "") + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + String source = "PowerShot Is AweSome"; + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader(source)); + + assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] { "PowerShotIsAweSome" }); + } + + public void testPreservePositionIncrementsDefault() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + 
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + + CannedTokenStream cannedTokenStream = new CannedTokenStream( + new Token("a", 1, 0, 1), + new Token("b", 2, 2, 3), // there is a gap, posInc is 2 + new Token("d", 1, 4, 5) + ); + + // the gap between a and b is not preserved + assertTokenStreamContents(tokenFilter.create(cannedTokenStream), new String[] { "a+b+d" }); + } + + public void testPreservePositionIncrementsTrue() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "+") + .put("index.analysis.filter.my_concatenate_graph.preserve_position_increments", "true") + .build(), + new CommonAnalysisPlugin() + ); + + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph"); + + CannedTokenStream cannedTokenStream = new CannedTokenStream( + new Token("a", 1, 0, 1), + new Token("b", 2, 2, 3), // there is a gap, posInc is 2 + new Token("d", 1, 4, 5) + ); + + // the gap between a and b is preserved + assertTokenStreamContents(tokenFilter.create(cannedTokenStream), new String[] { "a++b+d" }); + } + + public void testGraph() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter_graph") + .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") + 
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.analyzer.my_analyzer.type", "custom") + .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph") + .build(), + new CommonAnalysisPlugin() + ); + + String source = "PowerShot Is AweSome"; + + // Expected output from Whitespace Tokenizer is: "PowerShot" --> "Is" --> "Awe" --> "Some" + // Expected output from word_delimiter_graph is a graph: + // ---> "Power" --> "Shot" ---> "Is" ---> "Awe" ---> "Some" --- + // | | | | + // --> "PowerShot" -------- --> "AweSome" --------- + // and this filter will traverse through all possible paths to produce concatenated tokens + String[] expected = new String[] { + "Power Shot Is Awe Some", + "Power Shot Is AweSome", + "PowerShot Is Awe Some", + "PowerShot Is AweSome" }; + + // all tokens will be in the same position + int[] expectedPosIncrements = new int[] { 1, 0, 0, 0 }; + int[] expectedPosLengths = new int[] { 1, 1, 1, 1 }; + + NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_analyzer"); + assertAnalyzesToPositions(analyzer, source, expected, expectedPosIncrements, expectedPosLengths); + } + + public void testInvalidSeparator() { + expectThrows( + IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.token_separator", "11") + .build(), + new CommonAnalysisPlugin() + ) + ); + } + + /** + * Similar to the {@link #testGraph()} case, there will be 4 paths generated by word_delimiter_graph. + * By setting max_graph_expansions to 3, we expect an exception. 
+ */ + public void testMaxGraphExpansion() throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter_graph") + .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") + .put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph") + .put("index.analysis.filter.my_concatenate_graph.max_graph_expansions", "3") + .put("index.analysis.analyzer.my_analyzer.type", "custom") + .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph") + .build(), + new CommonAnalysisPlugin() + ); + + String source = "PowerShot Is AweSome"; + + TokenStream tokenStream = analysis.indexAnalyzers.get("my_analyzer").tokenStream("dummy", source); + + tokenStream.reset(); + + expectThrows(TooComplexToDeterminizeException.class, tokenStream::incrementToken); + } +} diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java index bc7dd3b110287..faaf636d4a8ff 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -137,10 +137,9 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { .putList("analysis.analyzer.search_autocomplete.filter", "lowercase", "wordDelimiter") ) ); - client().prepareIndex("test", "test", "1").setSource("name", "ARCOTEL Hotels Deutschland").get(); + client().prepareIndex("test").setId("1").setSource("name", "ARCOTEL Hotels Deutschland").get(); refresh(); 
SearchResponse search = client().prepareSearch("test") - .setTypes("test") .setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR)) .highlighter(new HighlightBuilder().field("name.autocomplete")) .get(); @@ -174,7 +173,8 @@ public void testMultiPhraseCutoff() throws IOException { ); ensureGreen(); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource( "body", "Test: http://www.facebook.com http://elasticsearch.org " @@ -236,7 +236,7 @@ public void testSynonyms() throws IOException { ); ensureGreen(); - client().prepareIndex("test", "type1", "0").setSource("field1", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test").setId("0").setSource("field1", "The quick brown fox jumps over the lazy dog").get(); refresh(); for (String highlighterType : new String[] { "plain", "fvh", "unified" }) { logger.info("--> highlighting (type=" + highlighterType + ") and searching on field1"); @@ -264,10 +264,12 @@ public void testPhrasePrefix() throws IOException { ensureGreen(); - client().prepareIndex("first_test_index", "type1", "0") + client().prepareIndex("first_test_index") + .setId("0") .setSource("field0", "The quick brown fox jumps over the lazy dog", "field1", "The quick brown fox jumps over the lazy dog") .get(); - client().prepareIndex("first_test_index", "type1", "1") + client().prepareIndex("first_test_index") + .setId("1") .setSource("field1", "The quick browse button is a fancy thing, right bro?") .get(); refresh(); @@ -345,7 +347,8 @@ public void testPhrasePrefix() throws IOException { ) ); // with synonyms - client().prepareIndex("second_test_index", "doc", "0") + client().prepareIndex("second_test_index") + .setId("0") .setSource( "type", "type2", @@ -355,10 +358,11 @@ public void testPhrasePrefix() throws IOException { "The quick brown fox jumps over the lazy dog" ) .get(); - client().prepareIndex("second_test_index", "doc", "1") + 
client().prepareIndex("second_test_index") + .setId("1") .setSource("type", "type2", "field4", "The quick browse button is a fancy thing, right bro?") .get(); - client().prepareIndex("second_test_index", "doc", "2").setSource("type", "type2", "field4", "a quick fast blue car").get(); + client().prepareIndex("second_test_index").setId("2").setSource("type", "type2", "field4", "a quick fast blue car").get(); refresh(); source = searchSource().postFilter(termQuery("type", "type2")) diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml index 3bca0e1b950bb..56ed2175df60a 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml @@ -24,9 +24,6 @@ --- "ngram_exception": - - skip: - version: " - 6.99.99" - reason: only starting from version 7.x this throws an error - do: catch: /The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to[:] \[1\] but was \[2\]\. 
This limit can be set by changing the \[index.max_ngram_diff\] index level setting\./ indices.analyze: diff --git a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java index babf024da019b..aeaa7246f33b8 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java @@ -114,7 +114,7 @@ public void testFailureInConditionalProcessor() { Exception e = expectThrows( Exception.class, - () -> client().prepareIndex("index", "doc") + () -> client().prepareIndex("index") .setId("1") .setSource("x", 0) .setPipeline(pipelineId) @@ -178,7 +178,8 @@ public Settings onNodeStopped(String nodeName) { checkPipelineExists.accept(pipelineIdWithoutScript); checkPipelineExists.accept(pipelineIdWithScript); - client().prepareIndex("index", "doc", "1") + client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline(pipelineIdWithoutScript) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -186,7 +187,8 @@ public Settings onNodeStopped(String nodeName) { IllegalStateException exception = expectThrows( IllegalStateException.class, - () -> client().prepareIndex("index", "doc", "2") + () -> client().prepareIndex("index") + .setId("2") .setSource("x", 0) .setPipeline(pipelineIdWithScript) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -209,7 +211,7 @@ public Settings onNodeStopped(String nodeName) { ) ); - Map source = client().prepareGet("index", "doc", "1").get().getSource(); + Map source = client().prepareGet("index", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); assertThat(source.get("y"), equalTo(0)); } @@ -236,13 +238,14 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio ); 
client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); - client().prepareIndex("index", "doc", "1") + client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - Map source = client().prepareGet("index", "doc", "1").get().getSource(); + Map source = client().prepareGet("index", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); assertThat(source.get("y"), equalTo(0)); assertThat(source.get("z"), equalTo(0)); @@ -254,13 +257,14 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio internalCluster().fullRestart(); ensureYellow("index"); - client().prepareIndex("index", "doc", "2") + client().prepareIndex("index") + .setId("2") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - source = client().prepareGet("index", "doc", "2").get().getSource(); + source = client().prepareGet("index", "2").get().getSource(); assertThat(source.get("x"), equalTo(0)); assertThat(source.get("y"), equalTo(0)); assertThat(source.get("z"), equalTo(0)); @@ -275,26 +279,28 @@ public void testWithDedicatedIngestNode() throws Exception { ); client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); - client().prepareIndex("index", "doc", "1") + client().prepareIndex("index") + .setId("1") .setSource("x", 0) .setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - Map source = client().prepareGet("index", "doc", "1").get().getSource(); + Map source = client().prepareGet("index", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); assertThat(source.get("y"), equalTo(0)); logger.info("Stopping"); internalCluster().restartNode(node, new InternalTestCluster.RestartCallback()); - client(ingestNode).prepareIndex("index", "doc", "2") + client(ingestNode).prepareIndex("index") + .setId("2") .setSource("x", 0) 
.setPipeline("_id") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - source = client(ingestNode).prepareGet("index", "doc", "2").get().getSource(); + source = client(ingestNode).prepareGet("index", "2").get().getSource(); assertThat(source.get("x"), equalTo(0)); assertThat(source.get("y"), equalTo(0)); } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java index 93cb60c5b5296..c45104873c7fd 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java @@ -44,13 +44,13 @@ public class ProcessorsWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + private static final Whitelist ALLOWLIST = WhitelistLoader.loadFromResourceFiles( ProcessorsWhitelistExtension.class, "processors_whitelist.txt" ); @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST)); + return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(ALLOWLIST)); } } diff --git a/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt b/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt index 1372ef2ed03be..7b8c60507887b 100644 --- a/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt +++ b/modules/ingest-common/src/main/resources/org/opensearch/ingest/common/processors_whitelist.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist of static processor methods that can be accessed from painless +# This file contains an allowlist of static processor methods that can be accessed from painless class org.opensearch.ingest.common.Processors { long bytes(String) diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java index 820ef3a8ee9c2..1ff2aa7fdd629 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java @@ -60,7 +60,6 @@ public void testJavaPattern() throws Exception { ); IngestDocument document = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -83,7 +82,6 @@ public void testTAI64N() throws Exception { ); IngestDocument document = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -104,19 +102,11 @@ public void testUnixMs() throws Exception { "m", "yyyyMMdd" ); - IngestDocument document = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("_field", "1000500") - ); + IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", "1000500")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); - document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", 1000500L)); + document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", 1000500L)); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } @@ -131,15 +121,7 @@ public void testUnix() throws Exception { "m", "yyyyMMdd" ); - IngestDocument document = new IngestDocument( - "_index", - 
"_type", - "_id", - null, - null, - null, - Collections.singletonMap("_field", "1000.5") - ); + IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", "1000.5")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } @@ -160,7 +142,7 @@ public void testTemplatedFields() throws Exception { indexNameFormat ); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", date)); + IngestDocument document = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("_field", date)); dateProcessor.execute(document); assertThat( diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java index 6f44b81e7b43b..ca0c0df40f009 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java @@ -55,7 +55,6 @@ public class DissectProcessorTests extends OpenSearchTestCase { public void testMatch() { IngestDocument ingestDocument = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -72,7 +71,6 @@ public void testMatch() { public void testMatchOverwrite() { IngestDocument ingestDocument = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -90,7 +88,6 @@ public void testMatchOverwrite() { public void testAdvancedMatch() { IngestDocument ingestDocument = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -116,7 +113,6 @@ public void testAdvancedMatch() { public void testMiss() { IngestDocument ingestDocument = new IngestDocument( "_index", - "_type", "_id", null, null, diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java 
b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java index f0c61700f4db0..8db3cefc3a6fd 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java @@ -61,15 +61,7 @@ public void testExecuteWithAsyncProcessor() throws Exception { values.add("foo"); values.add("bar"); values.add("baz"); - IngestDocument ingestDocument = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("values", values) - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values)); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", new AsyncUpperCaseProcessor("_ingest._value"), false); processor.execute(ingestDocument, (result, e) -> {}); @@ -87,7 +79,6 @@ public void testExecuteWithAsyncProcessor() throws Exception { public void testExecuteWithFailure() throws Exception { IngestDocument ingestDocument = new IngestDocument( "_index", - "_type", "_id", null, null, @@ -132,15 +123,7 @@ public void testMetadataAvailable() throws Exception { List> values = new ArrayList<>(); values.add(new HashMap<>()); values.add(new HashMap<>()); - IngestDocument ingestDocument = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("values", values) - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values)); TestProcessor innerProcessor = new TestProcessor(id -> { id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index")); @@ -152,10 +135,8 @@ public void testMetadataAvailable() throws Exception { assertThat(innerProcessor.getInvokedCounter(), equalTo(2)); assertThat(ingestDocument.getFieldValue("values.0.index", String.class), 
equalTo("_index")); - assertThat(ingestDocument.getFieldValue("values.0.type", String.class), equalTo("_type")); assertThat(ingestDocument.getFieldValue("values.0.id", String.class), equalTo("_id")); assertThat(ingestDocument.getFieldValue("values.1.index", String.class), equalTo("_index")); - assertThat(ingestDocument.getFieldValue("values.1.type", String.class), equalTo("_type")); assertThat(ingestDocument.getFieldValue("values.1.id", String.class), equalTo("_id")); } @@ -170,7 +151,7 @@ public void testRestOfTheDocumentIsAvailable() throws Exception { document.put("values", values); document.put("flat_values", new ArrayList<>()); document.put("other", "value"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, document); ForEachProcessor processor = new ForEachProcessor( "_tag", @@ -220,15 +201,7 @@ public String getDescription() { int numValues = randomIntBetween(1, 10000); List values = IntStream.range(0, numValues).mapToObj(i -> "").collect(Collectors.toList()); - IngestDocument ingestDocument = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("values", values) - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", values)); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false); processor.execute(ingestDocument, (result, e) -> {}); @@ -244,15 +217,7 @@ public void testModifyFieldsOutsideArray() throws Exception { values.add("string"); values.add(1); values.add(null); - IngestDocument ingestDocument = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("values", values) - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values", 
values)); TemplateScript.Factory template = new TestTemplateService.MockTemplateScript.Factory("errors"); @@ -290,7 +255,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws Map source = new HashMap<>(); source.put("_value", "new_value"); source.put("values", values); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, source); TestProcessor processor = new TestProcessor( doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class)) @@ -320,15 +285,7 @@ public void testNestedForEach() throws Exception { value.put("values2", innerValues); values.add(value); - IngestDocument ingestDocument = new IngestDocument( - "_index", - "_type", - "_id", - null, - null, - null, - Collections.singletonMap("values1", values) - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.singletonMap("values1", values)); TestProcessor testProcessor = new TestProcessor( doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) @@ -352,7 +309,7 @@ public void testNestedForEach() throws Exception { } public void testIgnoreMissing() throws Exception { - IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.emptyMap()); + IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); TestProcessor testProcessor = new TestProcessor(doc -> {}); ForEachProcessor processor = new ForEachProcessor("_tag", null, "_ingest._value", testProcessor, true); @@ -363,7 +320,7 @@ public void testIgnoreMissing() throws Exception { public void testAppendingToTheSameField() { Map source = 
Collections.singletonMap("field", Arrays.asList("a", "b")); - IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source); + IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, source); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); TestProcessor testProcessor = new TestProcessor(id -> id.appendFieldValue("field", "a")); ForEachProcessor processor = new ForEachProcessor("_tag", null, "field", testProcessor, true); @@ -375,7 +332,7 @@ public void testAppendingToTheSameField() { public void testRemovingFromTheSameField() { Map source = Collections.singletonMap("field", Arrays.asList("a", "b")); - IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", "_type", null, null, null, source); + IngestDocument originalIngestDocument = new IngestDocument("_index", "_id", null, null, null, source); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); TestProcessor testProcessor = new TestProcessor(id -> id.removeField("field.0")); ForEachProcessor processor = new ForEachProcessor("_tag", null, "field", testProcessor, true); diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml index 2224d56165fd3..e012a82b15927 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -32,7 +32,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -66,7 +65,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -97,7 +95,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -112,7 +109,7 @@ 
teardown: - match: { error.root_cause.0.property_name: "field" } --- -"Test simulate without index type and id": +"Test simulate without id": - do: ingest.simulate: body: > @@ -166,7 +163,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -190,7 +186,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -223,7 +218,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" @@ -275,7 +269,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": { @@ -335,7 +328,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "not_foo": "bar" @@ -343,7 +335,6 @@ teardown: }, { "_index": "index", - "_type": "type", "_id": "id2", "_source": { "foo": "bar" @@ -383,7 +374,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar", @@ -392,7 +382,6 @@ teardown: }, { "_index": "index", - "_type": "type", "_id": "id2", "_source": { "foo": "5", @@ -525,7 +514,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "field1": "123.42 400 " @@ -602,7 +590,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "field1": "123.42 400 " @@ -655,7 +642,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "field1": "123.42 400 " @@ -729,7 +715,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "field1": "123.42 400 " @@ -804,7 +789,6 @@ teardown: "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "field1": "123.42 400 " diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 4eedf598c3f87..f78dc49e9fb8a 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -39,11 +39,11 @@ 
opensearchplugin { } dependencies { - api('com.maxmind.geoip2:geoip2:2.13.1') + api('com.maxmind.geoip2:geoip2:2.16.1') // geoip2 dependencies: api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") - api('com.maxmind.db:maxmind-db:1.3.1') + api('com.maxmind.db:maxmind-db:2.0.0') testImplementation 'org.elasticsearch:geolite2-databases:20191119' } @@ -71,10 +71,8 @@ tasks.named("thirdPartyAudit").configure { ignoreMissingClasses( // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient: 'org.apache.http.HttpEntity', - 'org.apache.http.HttpHost', 'org.apache.http.HttpResponse', 'org.apache.http.StatusLine', - 'org.apache.http.auth.UsernamePasswordCredentials', 'org.apache.http.client.config.RequestConfig$Builder', 'org.apache.http.client.config.RequestConfig', 'org.apache.http.client.methods.CloseableHttpResponse', diff --git a/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1 deleted file mode 100644 index 253d9f12e7a3a..0000000000000 --- a/modules/ingest-geoip/licenses/geoip2-2.13.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f27d1a49d5a29dd4a7ac5006ce2eb16b8b9bb888 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..0221476794d3a --- /dev/null +++ b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 @@ -0,0 +1 @@ +c92040bd6ef2cb59be71c6749d08c141ca546caf \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1 deleted file mode 100644 index aebff2c3a849c..0000000000000 --- a/modules/ingest-geoip/licenses/maxmind-db-1.3.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -211bca628225bc0f719051b16deb03a747d7a14f \ No newline at end of file diff --git 
a/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 new file mode 100644 index 0000000000000..32c18f89c6a29 --- /dev/null +++ b/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 @@ -0,0 +1 @@ +e7e0fd82da0a160b7928ba214e699a7e6a74fff4 \ No newline at end of file diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java index 2ef5d8da000b1..e88c77b8e33f4 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java @@ -167,7 +167,7 @@ public void testLazyLoading() throws IOException { internalCluster().getInstance(IngestService.class, ingestNode); // the geo-IP database should not be loaded yet as we have no indexed any documents using a pipeline that has a geo-IP processor assertDatabaseLoadStatus(ingestNode, false); - final IndexRequest indexRequest = new IndexRequest("index", "_doc"); + final IndexRequest indexRequest = new IndexRequest("index"); indexRequest.setPipeline("geoip"); indexRequest.source(Collections.singletonMap("ip", "1.1.1.1")); final IndexResponse indexResponse = client().index(indexRequest).actionGet(); diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 15ca93e0fbae4..cda2f5692b0db 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -286,7 +286,7 @@ public void testLazyLoading() throws Exception { } final 
Map field = Collections.singletonMap("_field", "1.1.1.1"); - final IngestDocument document = new IngestDocument("index", "type", "id", "routing", 1L, VersionType.EXTERNAL, field); + final IngestDocument document = new IngestDocument("index", "id", "routing", 1L, VersionType.EXTERNAL, field); Map config = new HashMap<>(); config.put("field", "_field"); @@ -343,7 +343,7 @@ public void testLoadingCustomDatabase() throws IOException { } final Map field = Collections.singletonMap("_field", "1.1.1.1"); - final IngestDocument document = new IngestDocument("index", "type", "id", "routing", 1L, VersionType.EXTERNAL, field); + final IngestDocument document = new IngestDocument("index", "id", "routing", 1L, VersionType.EXTERNAL, field); Map config = new HashMap<>(); config.put("field", "_field"); diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index f7d5b7d039afc..e3feacd71f060 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -37,10 +37,10 @@ opensearchplugin { dependencies { api "org.apache.lucene:lucene-expressions:${versions.lucene}" - api 'org.antlr:antlr4-runtime:4.5.1-1' - api 'org.ow2.asm:asm:5.0.4' - api 'org.ow2.asm:asm-commons:5.0.4' - api 'org.ow2.asm:asm-tree:5.0.4' + api 'org.antlr:antlr4-runtime:4.9.3' + api 'org.ow2.asm:asm:9.2' + api 'org.ow2.asm:asm-commons:9.2' + api 'org.ow2.asm:asm-tree:9.2' } restResources { restApi { diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 deleted file mode 100644 index f15e50069ba63..0000000000000 --- a/modules/lang-expression/licenses/antlr4-runtime-4.5.1-1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -66144204f9d6d7d3f3f775622c2dd7e9bd511d97 diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1 new file mode 100644 index 0000000000000..13a2367439ede --- /dev/null +++ 
b/modules/lang-expression/licenses/antlr4-runtime-4.9.3.jar.sha1 @@ -0,0 +1 @@ +81befc16ebedb8b8aea3e4c0835dd5ca7e8523a8 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-5.0.4.jar.sha1 deleted file mode 100644 index 9223dba380f8c..0000000000000 --- a/modules/lang-expression/licenses/asm-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0da08b8cce7bbf903602a25a3a163ae252435795 diff --git a/modules/lang-expression/licenses/asm-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-9.2.jar.sha1 new file mode 100644 index 0000000000000..28f456d3cbcb2 --- /dev/null +++ b/modules/lang-expression/licenses/asm-9.2.jar.sha1 @@ -0,0 +1 @@ +81a03f76019c67362299c40e0ba13405f5467bff \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 deleted file mode 100644 index 94fe0cd92c9c9..0000000000000 --- a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a556786086c23cd689a0328f8519db93821c04c diff --git a/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 new file mode 100644 index 0000000000000..7beb3d29afe86 --- /dev/null +++ b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 @@ -0,0 +1 @@ +f4d7f0fc9054386f2893b602454d48e07d4fbead \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 deleted file mode 100644 index 5822a485a61ff..0000000000000 --- a/modules/lang-expression/licenses/asm-tree-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -396ce0c07ba2b481f25a70195c7c94922f0d1b0b \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 new file mode 100644 index 0000000000000..7b486521ecef3 --- 
/dev/null +++ b/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 @@ -0,0 +1 @@ +d96c99a30f5e1a19b0e609dbb19a44d8518ac01e \ No newline at end of file diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 453787fe32972..259234d79ab42 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -100,7 +100,7 @@ private SearchRequestBuilder buildRequest(String script, Object... params) { public void testBasic() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'] + 1").get(); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); @@ -109,7 +109,7 @@ public void testBasic() throws Exception { public void testFunction() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'] + abs(1)").get(); assertSearchResponse(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); @@ -119,7 +119,7 @@ public void testFunction() throws Exception { public void testBasicUsingDotValue() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); + 
client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'].value + 1").get(); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); @@ -130,9 +130,9 @@ public void testScore() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), - client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye") + client().prepareIndex("test").setId("1").setSource("text", "hello goodbye"), + client().prepareIndex("test").setId("2").setSource("text", "hello hello hello goodbye"), + client().prepareIndex("test").setId("3").setSource("text", "hello hello goodebye") ); ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction( new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()) @@ -162,8 +162,8 @@ public void testDateMethods() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), + client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); SearchResponse rsp = buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()").get(); assertEquals(2, rsp.getHits().getTotalHits().value); @@ -192,8 +192,8 @@ public void testDateObjectMethods() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", 
"1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + client().prepareIndex("test").setId("1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), + client().prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); SearchResponse rsp = buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour").get(); assertEquals(2, rsp.getHits().getTotalHits().value); @@ -241,9 +241,9 @@ public void testMultiValueMethods() throws Exception { indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource(doc1), - client().prepareIndex("test", "doc", "2").setSource(doc2), - client().prepareIndex("test", "doc", "3").setSource(doc3) + client().prepareIndex("test").setId("1").setSource(doc1), + client().prepareIndex("test").setId("2").setSource(doc2), + client().prepareIndex("test").setId("3").setSource(doc3) ); SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get(); @@ -324,7 +324,7 @@ public void testMultiValueMethods() throws Exception { public void testInvalidDateMethodCall() throws Exception { OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double")); ensureGreen("test"); - indexRandom(true, client().prepareIndex("test", "doc", "1").setSource("double", "178000000.0")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("double", "178000000.0")); try { buildRequest("doc['double'].getYear()").get(); fail(); @@ -347,8 +347,8 @@ public void testSparseField() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 4), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "y", 2) + 
client().prepareIndex("test").setId("1").setSource("id", 1, "x", 4), + client().prepareIndex("test").setId("2").setSource("id", 2, "y", 2) ); SearchResponse rsp = buildRequest("doc['x'] + 1").get(); OpenSearchAssertions.assertSearchResponse(rsp); @@ -361,7 +361,7 @@ public void testSparseField() throws Exception { public void testMissingField() throws Exception { createIndex("test"); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("x", 4).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 4).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['bogus']").get(); fail("Expected missing field to cause failure"); @@ -380,9 +380,9 @@ public void testParams() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 10), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "x", 3), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "x", 5) + client().prepareIndex("test").setId("1").setSource("id", 1, "x", 10), + client().prepareIndex("test").setId("2").setSource("id", 2, "x", 3), + client().prepareIndex("test").setId("3").setSource("id", 3, "x", 5) ); // a = int, b = double, c = long String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 
1 : 0)"; @@ -395,7 +395,7 @@ public void testParams() throws Exception { } public void testCompileFailure() { - client().prepareIndex("test", "doc", "1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("garbage%@#%@").get(); fail("Expected expression compilation failure"); @@ -406,7 +406,7 @@ public void testCompileFailure() { } public void testNonNumericParam() { - client().prepareIndex("test", "doc", "1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("x", 1).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("a", "a", "astring").get(); fail("Expected string parameter to cause failure"); @@ -421,7 +421,7 @@ public void testNonNumericParam() { } public void testNonNumericField() { - client().prepareIndex("test", "doc", "1").setSource("text", "this is not a number").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("text", "this is not a number").setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['text.keyword']").get(); fail("Expected text field to cause execution failure"); @@ -436,7 +436,7 @@ public void testNonNumericField() { } public void testInvalidGlobalVariable() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("bogus").get(); fail("Expected bogus variable to cause execution failure"); @@ -451,7 +451,7 @@ public void testInvalidGlobalVariable() { } public void testDocWithoutField() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc").get(); fail("Expected doc variable without field to cause 
execution failure"); @@ -466,7 +466,7 @@ public void testDocWithoutField() { } public void testInvalidFieldMember() { - client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", 5).setRefreshPolicy(IMMEDIATE).get(); try { buildRequest("doc['foo'].bogus").get(); fail("Expected bogus field member to cause execution failure"); @@ -486,9 +486,9 @@ public void testSpecialValueVariable() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2), - client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4), - client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8) + client().prepareIndex("test").setId("1").setSource("x", 5, "y", 1.2), + client().prepareIndex("test").setId("2").setSource("x", 10, "y", 1.4), + client().prepareIndex("test").setId("3").setSource("x", 13, "y", 1.8) ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); @@ -532,9 +532,9 @@ public void testStringSpecialValueVariable() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello"), - client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello") + client().prepareIndex("test").setId("1").setSource("text", "hello"), + client().prepareIndex("test").setId("2").setSource("text", "goodbye"), + client().prepareIndex("test").setId("3").setSource("text", "hello") ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); @@ -564,9 +564,8 @@ public void testInvalidUpdateScript() throws Exception { try { createIndex("test_index"); ensureGreen("test_index"); - indexRandom(true, client().prepareIndex("test_index", "doc", "1").setSource("text_field", "text")); + indexRandom(true, 
client().prepareIndex("test_index").setId("1").setSource("text_field", "text")); UpdateRequestBuilder urb = client().prepareUpdate().setIndex("test_index"); - urb.setType("doc"); urb.setId("1"); urb.setScript(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "0", Collections.emptyMap())); urb.get(); @@ -585,11 +584,11 @@ public void testPipelineAggregationScript() throws Exception { ensureGreen("agg_index"); indexRandom( true, - client().prepareIndex("agg_index", "doc", "1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) + client().prepareIndex("agg_index").setId("1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index").setId("5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) ); SearchResponse response = client().prepareSearch("agg_index") .addAggregation( @@ -649,7 +648,8 @@ public void testGeo() throws Exception { xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "test") @@ 
-696,9 +696,9 @@ public void testBoolean() throws Exception { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "price", 1.0, "vip", true), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "price", 2.0, "vip", false), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "price", 2.0, "vip", false) + client().prepareIndex("test").setId("1").setSource("id", 1, "price", 1.0, "vip", true), + client().prepareIndex("test").setId("2").setSource("id", 2, "price", 2.0, "vip", false), + client().prepareIndex("test").setId("3").setSource("id", 3, "price", 2.0, "vip", false) ); // access .value SearchResponse rsp = buildRequest("doc['vip'].value").get(); @@ -729,8 +729,8 @@ public void testFilterScript() throws Exception { ensureGreen("test"); indexRandom( true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "foo", 1.0), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "foo", 0.0) + client().prepareIndex("test").setId("1").setSource("id", 1, "foo", 1.0), + client().prepareIndex("test").setId("2").setSource("id", 2, "foo", 0.0) ); SearchRequestBuilder builder = buildRequest("doc['foo'].value"); Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java index 2a8236d5e0e4b..5aade265439d2 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java @@ -69,11 +69,9 @@ public void testAllOpsDisabledIndexedScripts() throws IOException { .setId("script1") .setContent(new BytesArray("{\"script\": {\"lang\": \"expression\", \"source\": 
\"2\"} }"), XContentType.JSON) .get(); - client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get(); try { - client().prepareUpdate("test", "scriptTest", "1") - .setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) - .get(); + client().prepareUpdate("test", "1").setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())).get(); fail("update script should have been rejected"); } catch (Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); @@ -85,7 +83,6 @@ public void testAllOpsDisabledIndexedScripts() throws IOException { new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) ) .setIndices("test") - .setTypes("scriptTest") .get(); fail("search script should have been rejected"); } catch (Exception e) { diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java index 0e0f21405818b..d0941cbc9452f 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java @@ -77,7 +77,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null); + lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData); } private FieldScript.LeafFactory compile(String expression) { diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java 
b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java index 83b5c0930d1d0..f3559da59f992 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null); + lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData); } private NumberSortScript.LeafFactory compile(String expression) { diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java index a71932ded1a7a..af7fc580f8a65 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null); + lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData); } private TermsSetQueryScript.LeafFactory compile(String expression) { diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index a26798bf90b91..511a6b144c21a 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -38,7 +38,7 @@ opensearchplugin { } dependencies { - api "com.github.spullara.mustache.java:compiler:0.9.6" + api 
"com.github.spullara.mustache.java:compiler:0.9.10" } restResources { diff --git a/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1 new file mode 100644 index 0000000000000..6336318c2ce1a --- /dev/null +++ b/modules/lang-mustache/licenses/compiler-0.9.10.jar.sha1 @@ -0,0 +1 @@ +6111ae24e3be9ecbd75f5fe908583fc14b4f0174 \ No newline at end of file diff --git a/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1 deleted file mode 100644 index 9c0e54641475b..0000000000000 --- a/modules/lang-mustache/licenses/compiler-0.9.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1b8707299c34406ed0ba40bbf8513352ac4765c9 \ No newline at end of file diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index 5dad1b17cbf4c..617f1f4f738a0 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -66,7 +66,8 @@ public void testBasic() throws Exception { final int numDocs = randomIntBetween(10, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex("msearch", "test", String.valueOf(i)) + indexRequestBuilders[i] = client().prepareIndex("msearch") + .setId(String.valueOf(i)) .setSource("odd", (i % 2 == 0), "group", (i % 3)); } indexRandom(true, indexRequestBuilders); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java index 
df53fcc0c3b6f..61f047a32f1c1 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java @@ -68,8 +68,8 @@ protected Collection> getPlugins() { @Before public void setup() throws IOException { createIndex("test"); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); - client().prepareIndex("test", "type", "2").setSource(jsonBuilder().startObject().field("text", "value2").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("text", "value2").endObject()).get(); client().admin().indices().prepareRefresh().get(); } @@ -185,20 +185,22 @@ public void testIndexedTemplateClient() throws Exception { assertNotNull(getResponse.getSource()); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", 
XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( - new SearchRequest("test").types("type") - ).setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) + .setScript("testTemplate") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get(); assertHitCount(searchResponse.getResponse(), 4); assertAcked(client().admin().cluster().prepareDeleteStoredScript("testTemplate")); @@ -227,25 +229,27 @@ public void testIndexedTemplate() throws Exception { assertAcked(client().admin().cluster().preparePutStoredScript().setId("3").setContent(new BytesArray(script), XContentType.JSON)); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", 
"5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( - new SearchRequest().indices("test").types("type") - ).setScript("1a").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test")) + .setScript("1a") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get(); assertHitCount(searchResponse.getResponse(), 4); expectThrows( ResourceNotFoundException.class, - () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test").types("type")) + () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test")) .setScript("1000") .setScriptType(ScriptType.STORED) .setScriptParams(templateParams) @@ -253,7 +257,7 @@ public void testIndexedTemplate() throws Exception { ); templateParams.put("fieldParam", "bar"); - searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test").types("type")) + searchResponse = new 
SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) .setScript("2") .setScriptType(ScriptType.STORED) .setScriptParams(templateParams) @@ -266,9 +270,7 @@ public void testIndexedTemplateOverwrite() throws Exception { createIndex("testindex"); ensureGreen("testindex"); - client().prepareIndex("testindex", "test", "1") - .setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()) - .get(); + client().prepareIndex("testindex").setId("1").setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()).get(); client().admin().indices().prepareRefresh().get(); int iterations = randomIntBetween(2, 11); @@ -304,7 +306,7 @@ public void testIndexedTemplateOverwrite() throws Exception { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex").types("test")) + () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex")) .setScript("git01") .setScriptType(ScriptType.STORED) .setScriptParams(templateParams) @@ -320,9 +322,11 @@ public void testIndexedTemplateOverwrite() throws Exception { .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON) ); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( - new SearchRequest("testindex").types("test") - ).setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex")) + .setScript("git01") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get(); assertHitCount(searchResponse.getResponse(), 1); } } @@ -348,11 +352,11 @@ public void testIndexedTemplateWithArray() throws Exception { client().admin().cluster().preparePutStoredScript().setId("4").setContent(new BytesArray(multiQuery), 
XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("2").setSource("{\"theField\":\"foo 2\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("3").setSource("{\"theField\":\"foo 3\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("4").setSource("{\"theField\":\"foo 4\"}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("test").setId("5").setSource("{\"theField\":\"bar\"}", XContentType.JSON)); bulkRequestBuilder.get(); client().admin().indices().prepareRefresh().get(); @@ -360,9 +364,11 @@ public void testIndexedTemplateWithArray() throws Exception { String[] fieldParams = { "foo", "bar" }; arrayTemplateParams.put("fieldParam", fieldParams); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( - new SearchRequest("test").types("type") - ).setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams).get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) + .setScript("4") + 
.setScriptType(ScriptType.STORED) + .setScriptParams(arrayTemplateParams) + .get(); assertHitCount(searchResponse.getResponse(), 5); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java index c4c7ec9bf12b9..52a6fb3756c16 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java @@ -33,7 +33,6 @@ package org.opensearch.script.mustache; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestMultiSearchTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchTemplateAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying types in multi search template requests is deprecated."; private static final Set RESPONSE_PARAMS; @@ -79,10 +75,7 @@ public List routes() { new Route(GET, "/_msearch/template"), new Route(POST, "/_msearch/template"), new Route(GET, "/{index}/_msearch/template"), - new Route(POST, "/{index}/_msearch/template"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_msearch/template"), - new Route(POST, "/{index}/{type}/_msearch/template") + new Route(POST, "/{index}/_msearch/template") ) ); } @@ -95,14 +88,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { MultiSearchTemplateRequest multiRequest = parseRequest(request, allowExplicitIndex); - - // Emit a single deprecation message if any search template contains types. - for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) { - if (searchTemplateRequest.getRequest().types().length > 0) { - deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE); - break; - } - } return channel -> client.execute(MultiSearchTemplateAction.INSTANCE, multiRequest, new RestToXContentListener<>(channel)); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java index b66d275686981..68ba824955468 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java @@ -68,10 +68,7 @@ public List routes() { new Route(GET, "/_search/template"), new Route(POST, "/_search/template"), new Route(GET, "/{index}/_search/template"), - new Route(POST, "/{index}/_search/template"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_search/template"), - new Route(POST, "/{index}/{type}/_search/template") + new Route(POST, "/{index}/_search/template") ) ); } diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java index aaf3126876a59..1a663dcb18235 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java @@ -69,13 +69,10 @@ public void testParseRequest() throws Exception { assertThat(request.requests().get(0).getRequest().preference(), nullValue()); assertThat(request.requests().get(1).getRequest().indices()[0], equalTo("test2")); assertThat(request.requests().get(1).getRequest().indices()[1], equalTo("test3")); - assertThat(request.requests().get(1).getRequest().types()[0], equalTo("type1")); assertThat(request.requests().get(1).getRequest().requestCache(), nullValue()); assertThat(request.requests().get(1).getRequest().preference(), equalTo("_local")); assertThat(request.requests().get(2).getRequest().indices()[0], equalTo("test4")); assertThat(request.requests().get(2).getRequest().indices()[1], equalTo("test1")); - assertThat(request.requests().get(2).getRequest().types()[0], equalTo("type2")); - assertThat(request.requests().get(2).getRequest().types()[1], equalTo("type1")); assertThat(request.requests().get(2).getRequest().routing(), equalTo("123")); assertNotNull(request.requests().get(0).getScript()); assertNotNull(request.requests().get(1).getScript()); diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java deleted file mode 100644 index 
655d49a0273b5..0000000000000 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.script.mustache; - -import org.opensearch.common.bytes.BytesArray; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.nio.charset.StandardCharsets; - -public class RestMultiSearchTemplateActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiSearchTemplateAction(Settings.EMPTY)); - } - - public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_msearch/template") - .withContent(bytesContent, XContentType.JSON) - .build(); - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_msearch/template") - .withContent(bytesContent, XContentType.JSON) - .build(); - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java deleted file mode 100644 index 4f95da755f8fc..0000000000000 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.script.mustache; - -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.search.RestSearchAction; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -public class RestSearchTemplateActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_search/template") - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/_search/template") - .withParams(params) - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java index 84734e55e241c..0a2bb247e3c1a 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import 
org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -183,7 +182,7 @@ public void testSourceToXContent() throws IOException { } public void testSearchResponseToXContent() throws IOException { - SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap()); + SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap()); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; @@ -229,7 +228,6 @@ public void testSearchResponseToXContent() throws IOException { .field("max_score", 1.5F) .startArray("hits") .startObject() - .field("_type", "type") .field("_id", "id") .field("_score", 2.0F) .endObject() diff --git a/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json b/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json index 11a0091492c4d..1809b4012fde1 100644 --- a/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json +++ b/modules/lang-mustache/src/test/resources/org/opensearch/script/mustache/simple-msearch-template.json @@ -1,6 +1,6 @@ {"index":["test0", "test1"], "request_cache": true} {"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } } -{"index" : "test2,test3", "type" : "type1", "preference": "_local"} +{"index" : "test2,test3", "preference": "_local"} {"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } } -{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ], "routing": "123"} +{"index" : ["test4", "test1"], "routing": "123"} {"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } } diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml 
index 22192530b9ec1..a9d3c2da68617 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml @@ -141,10 +141,6 @@ --- "Test with new response format": - - skip: - version: " - 6.99.99" - reason: hits.total is returned as an object in 7.0.0 - - do: index: index: test diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml index fa56f5c0f72b1..e92e10b9ad276 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml @@ -174,10 +174,6 @@ setup: --- "Test with rest_total_hits_as_int": - - skip: - version: " - 6.99.99" - reason: hits.total is returned as an object in 7.0.0 - - do: put_script: id: stored_template_1 diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml index d59bfa9ffc322..accb55624dd06 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml @@ -25,15 +25,15 @@ setup: bulk: refresh: true body: - - '{"index": {"_index": "test-0", "_type": "_doc"}}' + - '{"index": {"_index": "test-0"}}' - '{"ip": "10.0.0.1", "integer": 38, "float": 12.5713, "name": "Ruth", "bool": true}' - - '{"index": {"_index": "test-0", "_type": "_doc"}}' + - '{"index": {"_index": "test-0"}}' - '{"ip": "10.0.0.2", "integer": 42, "float": 15.3393, 
"name": "Jackie", "surname": "Bowling", "bool": false}' - - '{"index": {"_index": "test-1", "_type": "_doc"}}' + - '{"index": {"_index": "test-1"}}' - '{"ip": "10.0.0.3", "integer": 29, "float": 19.0517, "name": "Stephanie", "bool": true}' - - '{"index": {"_index": "test-1", "_type": "_doc"}}' + - '{"index": {"_index": "test-1"}}' - '{"ip": "10.0.0.4", "integer": 19, "float": 19.3717, "surname": "Hamilton", "bool": true}' - - '{"index": {"_index": "test-2", "_type": "_doc"}}' + - '{"index": {"_index": "test-2"}}' - '{"ip": "10.0.0.5", "integer": 0, "float": 17.3349, "name": "Natalie", "bool": false}' --- diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 298f28be8cc54..eb93cdc77fb9c 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -46,11 +46,11 @@ testClusters.all { dependencies { api 'org.antlr:antlr4-runtime:4.5.3' - api 'org.ow2.asm:asm-util:7.2' + api 'org.ow2.asm:asm-util:9.2' api 'org.ow2.asm:asm-tree:7.2' api 'org.ow2.asm:asm-commons:7.2' api 'org.ow2.asm:asm-analysis:7.2' - api 'org.ow2.asm:asm:7.2' + api 'org.ow2.asm:asm:9.2' api project('spi') } diff --git a/modules/lang-painless/licenses/asm-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-7.2.jar.sha1 deleted file mode 100644 index acb97fc1a0249..0000000000000 --- a/modules/lang-painless/licenses/asm-7.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fa637eb67eb7628c915d73762b681ae7ff0b9731 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-9.2.jar.sha1 new file mode 100644 index 0000000000000..28f456d3cbcb2 --- /dev/null +++ b/modules/lang-painless/licenses/asm-9.2.jar.sha1 @@ -0,0 +1 @@ +81a03f76019c67362299c40e0ba13405f5467bff \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-7.2.jar.sha1 b/modules/lang-painless/licenses/asm-util-7.2.jar.sha1 deleted file mode 100644 index 6f70a0eea65ab..0000000000000 --- 
a/modules/lang-painless/licenses/asm-util-7.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a3ae34e57fa8a4040e28247291d0cc3d6b8c7bcf \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 new file mode 100644 index 0000000000000..5cb89aa115f30 --- /dev/null +++ b/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 @@ -0,0 +1 @@ +fbc178fc5ba3dab50fd7e8a5317b8b647c8e8946 \ No newline at end of file diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java index 695c8663872b0..b400c7a027fca 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/Whitelist.java @@ -39,18 +39,18 @@ import java.util.Objects; /** - * Whitelist contains data structures designed to be used to generate a whitelist of Java classes, + * Allowlist contains data structures designed to be used to generate an allowlist of Java classes, * constructors, methods, and fields that can be used within a Painless script at both compile-time * and run-time. * - * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each + * An allowlist consists of several pieces with {@link WhitelistClass}s as the top level. Each * {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and * {@link WhitelistField}s which are what will be available with a Painless script. See each individual - * whitelist object for more detail. + * allowlist object for more detail.
*/ public final class Whitelist { - private static final String[] BASE_WHITELIST_FILES = new String[] { + private static final String[] BASE_ALLOWLIST_FILES = new String[] { "org.opensearch.txt", "java.lang.txt", "java.math.txt", @@ -66,37 +66,37 @@ public final class Whitelist { "java.util.stream.txt" }; public static final List BASE_WHITELISTS = Collections.singletonList( - WhitelistLoader.loadFromResourceFiles(Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES) + WhitelistLoader.loadFromResourceFiles(Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_ALLOWLIST_FILES) ); - /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ + /** The {@link ClassLoader} used to look up the allowlisted Java classes, constructors, methods, and fields. */ public final ClassLoader classLoader; - /** The {@link List} of all the whitelisted Painless classes. */ + /** The {@link List} of all the allowlisted Painless classes. */ public final List whitelistClasses; - /** The {@link List} of all the whitelisted static Painless methods. */ + /** The {@link List} of all the allowlisted static Painless methods. */ public final List whitelistImportedMethods; - /** The {@link List} of all the whitelisted Painless class bindings. */ + /** The {@link List} of all the allowlisted Painless class bindings. */ public final List whitelistClassBindings; - /** The {@link List} of all the whitelisted Painless instance bindings. */ + /** The {@link List} of all the allowlisted Painless instance bindings. */ public final List whitelistInstanceBindings; /** Standard constructor. All values must be not {@code null}. 
*/ public Whitelist( ClassLoader classLoader, - List whitelistClasses, - List whitelistImportedMethods, - List whitelistClassBindings, - List whitelistInstanceBindings + List allowlistClasses, + List allowlistImportedMethods, + List allowlistClassBindings, + List allowlistInstanceBindings ) { this.classLoader = Objects.requireNonNull(classLoader); - this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); - this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods)); - this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistClassBindings)); - this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistInstanceBindings)); + this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(allowlistClasses)); + this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(allowlistImportedMethods)); + this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(allowlistClassBindings)); + this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(allowlistInstanceBindings)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java index 3947be6005448..bf5083998f94b 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java @@ -42,7 +42,7 @@ /** * Class represents the equivalent of a Java class in Painless complete with super classes, * constructors, methods, and fields. There must be a one-to-one mapping of class names to Java - * classes. Though, since multiple whitelists may be combined into a single whitelist for a + * classes. 
Though, since multiple allowlists may be combined into a single allowlist for a * specific context, as long as multiple classes representing the same Java class have the same * class name and have legal constructor/method overloading they can be merged together. * @@ -51,7 +51,7 @@ * number of parameters, and multiples methods with the same name are allowed for a single class * as long as they have the same return type and a different number of parameters. * - * Classes will automatically extend other whitelisted classes if the Java class they represent is a + * Classes will automatically extend other allowlisted classes if the Java class they represent is a * subclass of other classes including Java interfaces. */ public final class WhitelistClass { @@ -62,13 +62,13 @@ public final class WhitelistClass { /** The Java class name this class represents. */ public final String javaClassName; - /** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistConstructor}s) available to this class. */ public final List whitelistConstructors; - /** The {@link List} of whitelisted ({@link WhitelistMethod}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistMethod}s) available to this class. */ public final List whitelistMethods; - /** The {@link List} of whitelisted ({@link WhitelistField}s) available to this class. */ + /** The {@link List} of allowlisted ({@link WhitelistField}s) available to this class. */ public final List whitelistFields; /** The {@link Map} of annotations for this class. 
*/ @@ -78,18 +78,18 @@ public final class WhitelistClass { public WhitelistClass( String origin, String javaClassName, - List whitelistConstructors, - List whitelistMethods, - List whitelistFields, + List allowlistConstructors, + List allowlistMethods, + List allowlistFields, List painlessAnnotations ) { this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); - this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); - this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); - this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(whitelistFields)); + this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(allowlistConstructors)); + this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(allowlistMethods)); + this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(allowlistFields)); if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java index 387453f1ea880..15ce4b84c4b09 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java @@ -50,7 +50,7 @@ */ public class WhitelistClassBinding { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from. */ public final String origin; /** The Java class name this class binding targets. 
*/ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java index 4b96d727e4ed6..301829968255c 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java @@ -40,14 +40,14 @@ import java.util.stream.Collectors; /** - * Constructor represents the equivalent of a Java constructor available as a whitelisted class + * Constructor represents the equivalent of a Java constructor available as an allowlisted class * constructor within Painless. Constructors for Painless classes may be accessed exactly as * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple * constructors as long as they comply with arity overloading described for {@link WhitelistClass}. */ public final class WhitelistConstructor { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from. */ public final String origin; /** diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java index e6519ea68af29..cc3f6290a6c17 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java @@ -40,13 +40,13 @@ import java.util.stream.Collectors; /** - * Field represents the equivalent of a Java field available as a whitelisted class field + * Field represents the equivalent of a Java field available as an allowlisted class field * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes * are using the '.'
operator on an existing class variable/field. */ public class WhitelistField { - /** Information about where this method was whitelisted from. */ + /** Information about where this method was allowlisted from. */ public final String origin; /** The field name used to look up the field reflection object. */ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java index 6e5994622ecdf..7e9bf0af94bd1 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java @@ -46,7 +46,7 @@ */ public class WhitelistInstanceBinding { - /** Information about where this constructor was whitelisted from. */ + /** Information about where this constructor was allowlisted from. */ public final String origin; /** The Java instance this instance binding targets. */ diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java index 6f123198ab4be..2da6d8fce1d8e 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java @@ -54,7 +54,7 @@ public final class WhitelistLoader { /** * Loads and creates a {@link Whitelist} from one to many text files using only the base annotation parsers. - * See {@link #loadFromResourceFiles(Class, Map, String...)} for information on how to structure a whitelist + * See {@link #loadFromResourceFiles(Class, Map, String...)} for information on how to structure an allowlist * text file. */ public static Whitelist loadFromResourceFiles(Class resource, String... 
filepaths) { @@ -66,17 +66,17 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String} * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java * reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field} - * specified as part of the whitelist in the text file. + * specified as part of the allowlist in the text file. * * A single pass is made through each file to collect all the information about each class, constructor, method, - * and field. Most validation will be done at a later point after all whitelists have been gathered and their + * and field. Most validation will be done at a later point after all allowlists have been gathered and their * merging takes place. * * A painless type name is one of the following: *
    *
  • def - The Painless dynamic type which is automatically included without a need to be - * whitelisted.
  • - *
  • fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as + * allowlisted.
  • + *
  • fully-qualified Java type name - Any allowlisted Java class will have the equivalent name as * a Painless type name with the exception that any dollar symbols used as part of inner classes will * be replaced with dot symbols.
  • *
  • short Java type name - The text after the final dot symbol of any specified Java class. A @@ -84,7 +84,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * as described later.
  • *
* - * The following can be parsed from each whitelist text file: + * The following can be parsed from each allowlist text file: *
    *
  • Blank lines will be ignored by the parser.
  • *
  • Comments may be created starting with a pound '#' symbol and end with a newline. These will @@ -98,19 +98,19 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep *
      *
    • A constructor may be specified starting with an opening parenthesis, followed by a * comma-delimited list of Painless type names corresponding to the type/class names for - * the equivalent Java parameter types (these must be whitelisted as well), a closing + * the equivalent Java parameter types (these must be allowlisted as well), a closing * parenthesis, and a newline.
    • *
    • A method may be specified starting with a Painless type name for the return type, * followed by the Java name of the method (which will also be the Painless name for the * method), an opening parenthesis, a comma-delimited list of Painless type names * corresponding to the type/class names for the equivalent Java parameter types - * (these must be whitelisted as well), a closing parenthesis, and a newline.
    • + * (these must be allowlisted as well), a closing parenthesis, and a newline. *
    • An augmented method may be specified starting with a Painless type name for the return * type, followed by the fully qualified Java name of the class the augmented method is - * part of (this class does not need to be whitelisted), the Java name of the method + * part of (this class does not need to be allowlisted), the Java name of the method * (which will also be the Painless name for the method), an opening parenthesis, a * comma-delimited list of Painless type names corresponding to the type/class names - * for the equivalent Java parameter types (these must be whitelisted as well), a closing + * for the equivalent Java parameter types (these must be allowlisted as well), a closing * parenthesis, and a newline.
    • *
    • A field may be specified starting with a Painless type name for the equivalent Java type * of the field, followed by the Java name of the field (which all be the Painless name @@ -130,7 +130,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * fully-qualified Java class name. Method argument types, method return types, and field types * must be specified with Painless type names (def, fully-qualified, or short) as described earlier. * - * The following example is used to create a single whitelist text file: + * The following example is used to create a single allowlist text file: * * {@code * # primitive types @@ -164,12 +164,12 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep * } */ public static Whitelist loadFromResourceFiles(Class resource, Map parsers, String... filepaths) { - List whitelistClasses = new ArrayList<>(); - List whitelistStatics = new ArrayList<>(); - List whitelistClassBindings = new ArrayList<>(); + List allowlistClasses = new ArrayList<>(); + List allowlistStatics = new ArrayList<>(); + List allowlistClassBindings = new ArrayList<>(); - // Execute a single pass through the whitelist text files. This will gather all the - // constructors, methods, augmented methods, and fields for each whitelisted class. + // Execute a single pass through the allowlist text files. This will gather all the + // constructors, methods, augmented methods, and fields for each allowlisted class. 
for (String filepath : filepaths) { String line; int number = -1; @@ -181,11 +181,11 @@ public static Whitelist loadFromResourceFiles(Class resource, Map whitelistConstructors = null; - List whitelistMethods = null; - List whitelistFields = null; + List allowlistConstructors = null; + List allowlistMethods = null; + List allowlistFields = null; List classAnnotations = null; while ((line = reader.readLine()) != null) { @@ -197,7 +197,7 @@ public static Whitelist loadFromResourceFiles(Class resource, Map resource, Map(); - whitelistMethods = new ArrayList<>(); - whitelistFields = new ArrayList<>(); + allowlistConstructors = new ArrayList<>(); + allowlistMethods = new ArrayList<>(); + allowlistFields = new ArrayList<>(); } else if (line.startsWith("static_import ")) { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { @@ -250,25 +250,25 @@ public static Whitelist loadFromResourceFiles(Class resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map resource, Map) resource::getClassLoader); - return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList()); + return new Whitelist(loader, allowlistClasses, allowlistStatics, allowlistClassBindings, Collections.emptyList()); } - private static List parseWhitelistAnnotations(Map parsers, String line) { + private static List parseAllowlistAnnotations(Map parsers, String line) { List annotations; diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java index 2a8e94206e276..9a57a5a098c19 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java @@ -40,7 +40,7 @@ import java.util.stream.Collectors; /** - * Method represents the 
equivalent of a Java method available as a whitelisted class method + * Method represents the equivalent of a Java method available as an allowlisted class method * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes * are using the '.' operator on an existing class variable/field. Painless classes may have multiple * methods with the same name as long as they comply with arity overloading described in @@ -50,11 +50,11 @@ * these are known as augmented methods. An augmented method can be added to a class as a part of any * Java class as long as the method is static and the first parameter of the method is the Java class * represented by the class. Note that the augmented method's parent Java class does not need to be - * whitelisted. + * allowlisted. */ public class WhitelistMethod { - /** Information about where this method was whitelisted from. */ + /** Information about where this method was allowlisted from. */ public final String origin; /** diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java index ec270a3363281..9874c61b2a080 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java @@ -39,8 +39,8 @@ import java.util.stream.Stream; /** - * WhitelistAnnotationParser is an interface used to define how to - * parse an annotation against any whitelist object while loading. + * WhitelistAnnotationParser is an interface used to define how to + * parse an annotation against any allowlist object while loading.
*/ public interface WhitelistAnnotationParser { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java index 5d302e2698f1b..eca931d87b68c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java @@ -168,7 +168,7 @@ public Loader createLoader(ClassLoader parent) { private final Class scriptClass; /** - * The whitelist the script will use. + * The allowlist the script will use. */ private final PainlessLookup painlessLookup; @@ -182,7 +182,7 @@ public Loader createLoader(ClassLoader parent) { * @param scriptClass The class/interface the script will implement. * @param factoryClass An optional class/interface to create the {@code scriptClass} instance. * @param statefulFactoryClass An optional class/interface to create the {@code factoryClass} instance. - * @param painlessLookup The whitelist the script will use. + * @param painlessLookup The allowlist the script will use. */ Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { this.scriptClass = scriptClass; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java index 1c5b40b0e7166..de6fd5ebc0177 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java @@ -237,10 +237,10 @@ static MethodHandle arrayLengthGetter(Class arrayType) { *

      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted method. If one is not found, it throws an exception. + * until it finds a matching allowlisted method. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching method. *

      - * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param functions user defined functions and lambdas * @param constants available constants to be used if the method has the {@code InjectConstantAnnotation} * @param methodHandlesLookup caller's lookup @@ -249,7 +249,7 @@ static MethodHandle arrayLengthGetter(Class arrayType) { * @param name Name of the method. * @param args bootstrap args passed to callsite * @return pointer to matching method to invoke. never returns null. - * @throws IllegalArgumentException if no matching whitelisted method was found. + * @throws IllegalArgumentException if no matching allowlisted method was found. * @throws Throwable if a method reference cannot be converted to an functional interface */ static MethodHandle lookupMethod( @@ -473,26 +473,26 @@ private static MethodHandle lookupReferenceInternal( *

      * The following field loads are allowed: *

        - *
      • Whitelisted {@code field} from receiver's class or any superclasses. - *
      • Whitelisted method named {@code getField()} from receiver's class/superclasses/interfaces. - *
      • Whitelisted method named {@code isField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted {@code field} from receiver's class or any superclasses. + *
      • Allowlisted method named {@code getField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted method named {@code isField()} from receiver's class/superclasses/interfaces. *
      • The {@code length} field of an array. *
      • The value corresponding to a map key named {@code field} when the receiver is a Map. *
      • The value in a list at element {@code field} (integer) when the receiver is a List. *
      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted getter. If one is not found, it throws an exception. + * until it finds a matching allowlisted getter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching getter. *

      - * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. - * @throws IllegalArgumentException if no matching whitelisted field was found. + * @throws IllegalArgumentException if no matching allowlisted field was found. */ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receiverClass, String name) { - // first try whitelist + // first try allowlist MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name); if (getter != null) { @@ -530,24 +530,24 @@ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receive *

      * The following field stores are allowed: *

        - *
      • Whitelisted {@code field} from receiver's class or any superclasses. - *
      • Whitelisted method named {@code setField()} from receiver's class/superclasses/interfaces. + *
      • Allowlisted {@code field} from receiver's class or any superclasses. + *
      • Allowlisted method named {@code setField()} from receiver's class/superclasses/interfaces. *
      • The value corresponding to a map key named {@code field} when the receiver is a Map. *
      • The value in a list at element {@code field} (integer) when the receiver is a List. *
      *

      * This method traverses {@code recieverClass}'s class hierarchy (including interfaces) - * until it finds a matching whitelisted setter. If one is not found, it throws an exception. + * until it finds a matching allowlisted setter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching setter. *

      - * @param painlessLookup the whitelist + * @param painlessLookup the allowlist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. - * @throws IllegalArgumentException if no matching whitelisted field was found. + * @throws IllegalArgumentException if no matching allowlisted field was found. */ static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class receiverClass, String name) { - // first try whitelist + // first try allowlist MethodHandle setter = painlessLookup.lookupRuntimeSetterMethodHandle(receiverClass, name); if (setter != null) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java index 97e2b6f24666e..0726881b1297f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java @@ -53,7 +53,7 @@ * shift operator, and dynamic array index normalize. *

      * When a new type is encountered at the call site, we lookup from the appropriate - * whitelist, and cache with a guard. If we encounter too many types, we stop caching. + * allowlist, and cache with a guard. If we encounter too many types, we stop caching. *

      * Based on the cascaded inlining cache from the JSR 292 cookbook * (https://code.google.com/archive/p/jsr292-cookbook/, BSD license) @@ -166,7 +166,7 @@ static boolean checkClass(Class clazz, Object receiver) { } /** - * Does a slow lookup against the whitelist. + * Does a slow lookup against the allowlist. */ private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { switch (flavor) { @@ -470,10 +470,10 @@ static boolean checkBoth(Class left, Class right, Object leftObject, Objec * In addition to ordinary parameters, we also take some parameters defined at the call site: *

        *
      • {@code initialDepth}: initial call site depth. this is used to exercise megamorphic fallback. - *
      • {@code flavor}: type of dynamic call it is (and which part of whitelist to look at). + *
      • {@code flavor}: type of dynamic call it is (and which part of allowlist to look at). *
      • {@code args}: flavor-specific args. *
      - * And we take the {@link PainlessLookup} used to compile the script for whitelist checking. + * And we take the {@link PainlessLookup} used to compile the script for allowlist checking. *

      * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java index 097960dfbe620..c6aa266148791 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java @@ -59,8 +59,8 @@ */ public class FunctionRef { /** - * Creates a new FunctionRef which will resolve {@code type::call} from the whitelist. - * @param painlessLookup the whitelist against which this script is being compiled + * Creates a new FunctionRef which will resolve {@code type::call} from the allowlist. + * @param painlessLookup the allowlist against which this script is being compiled * @param functionTable user-defined and synthetic methods generated directly on the script class * @param location the character number within the script at compile-time * @param targetClass functional interface type to implement. diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java index 4c693243d2a22..09a23c15f346d 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java @@ -83,11 +83,11 @@ */ public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin { - private static final Map, List> whitelists; + private static final Map, List> allowlists; /* - * Contexts from Core that need custom whitelists can add them to the map below. - * Whitelist resources should be added as appropriately named, separate files + * Contexts from Core that need custom allowlists can add them to the map below. 
+ * Allowlist resources should be added as appropriately named, separate files * under Painless' resources */ static { @@ -108,23 +108,23 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens ingest.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); map.put(IngestScript.CONTEXT, ingest); - whitelists = map; + allowlists = map; } private final SetOnce painlessScriptEngine = new SetOnce<>(); @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - Map, List> contextsWithWhitelists = new HashMap<>(); + Map, List> contextsWithAllowlists = new HashMap<>(); for (ScriptContext context : contexts) { - // we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI - List contextWhitelists = whitelists.get(context); - if (contextWhitelists == null) { - contextWhitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + // we might have a context that only uses the base allowlists, so would not have been filled in by reloadSPI + List contextAllowlists = allowlists.get(context); + if (contextAllowlists == null) { + contextAllowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); } - contextsWithWhitelists.put(context, contextWhitelists); + contextsWithAllowlists.put(context, contextAllowlists); } - painlessScriptEngine.set(new PainlessScriptEngine(settings, contextsWithWhitelists)); + painlessScriptEngine.set(new PainlessScriptEngine(settings, contextsWithAllowlists)); return painlessScriptEngine.get(); } @@ -158,7 +158,7 @@ public void loadExtensions(ExtensionLoader loader) { .stream() .flatMap(extension -> extension.getContextWhitelists().entrySet().stream()) .forEach(entry -> { - List existing = whitelists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); + List existing = allowlists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); existing.addAll(entry.getValue()); 
}); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java index 4b2125aac244d..a9333fde6b443 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java @@ -68,7 +68,7 @@ import static org.opensearch.rest.RestRequest.Method.GET; /** - * Internal REST API for querying context information about Painless whitelists. + * Internal REST API for querying context information about Painless allowlists. * Commands include the following: *

        *
      • GET /_scripts/painless/_context -- retrieves a list of contexts
      • diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java index 4999d5d444673..be26e69ec22d1 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java @@ -593,10 +593,9 @@ private static Response prepareRamIndex( try (Directory directory = new ByteBuffersDirectory()) { try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) { String index = indexService.index().getName(); - String type = indexService.mapperService().documentMapper().type(); BytesReference document = request.contextSetup.document; XContentType xContentType = request.contextSetup.xContentType; - SourceToParse sourceToParse = new SourceToParse(index, type, "_id", document, xContentType); + SourceToParse sourceToParse = new SourceToParse(index, "_id", document, xContentType); ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); indexWriter.addDocuments(parsedDocument.docs()); try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java index dd8e253db4728..ff3fbc640e990 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java @@ -126,107 +126,107 @@ Class defineBridge(String name, byte[] bytes) { } } - public static PainlessLookup buildFromWhitelists(List whitelists) { + public static PainlessLookup buildFromWhitelists(List allowlists) { PainlessLookupBuilder 
painlessLookupBuilder = new PainlessLookupBuilder(); String origin = "internal error"; try { - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { - origin = whitelistClass.origin; + for (Whitelist allowlist : allowlists) { + for (WhitelistClass allowlistClass : allowlist.whitelistClasses) { + origin = allowlistClass.origin; painlessLookupBuilder.addPainlessClass( - whitelist.classLoader, - whitelistClass.javaClassName, - whitelistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false + allowlist.classLoader, + allowlistClass.javaClassName, + allowlistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false ); } } - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { - String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.'); + for (Whitelist allowlist : allowlists) { + for (WhitelistClass allowlistClass : allowlist.whitelistClasses) { + String targetCanonicalClassName = allowlistClass.javaClassName.replace('$', '.'); - for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) { - origin = whitelistConstructor.origin; + for (WhitelistConstructor allowlistConstructor : allowlistClass.whitelistConstructors) { + origin = allowlistConstructor.origin; painlessLookupBuilder.addPainlessConstructor( targetCanonicalClassName, - whitelistConstructor.canonicalTypeNameParameters, - whitelistConstructor.painlessAnnotations + allowlistConstructor.canonicalTypeNameParameters, + allowlistConstructor.painlessAnnotations ); } - for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { - origin = whitelistMethod.origin; + for (WhitelistMethod allowlistMethod : allowlistClass.whitelistMethods) { + origin = allowlistMethod.origin; painlessLookupBuilder.addPainlessMethod( - whitelist.classLoader, + allowlist.classLoader, targetCanonicalClassName, - 
whitelistMethod.augmentedCanonicalClassName, - whitelistMethod.methodName, - whitelistMethod.returnCanonicalTypeName, - whitelistMethod.canonicalTypeNameParameters, - whitelistMethod.painlessAnnotations + allowlistMethod.augmentedCanonicalClassName, + allowlistMethod.methodName, + allowlistMethod.returnCanonicalTypeName, + allowlistMethod.canonicalTypeNameParameters, + allowlistMethod.painlessAnnotations ); } - for (WhitelistField whitelistField : whitelistClass.whitelistFields) { - origin = whitelistField.origin; + for (WhitelistField allowlistField : allowlistClass.whitelistFields) { + origin = allowlistField.origin; painlessLookupBuilder.addPainlessField( targetCanonicalClassName, - whitelistField.fieldName, - whitelistField.canonicalTypeNameParameter + allowlistField.fieldName, + allowlistField.canonicalTypeNameParameter ); } } - for (WhitelistMethod whitelistStatic : whitelist.whitelistImportedMethods) { - origin = whitelistStatic.origin; + for (WhitelistMethod allowlistStatic : allowlist.whitelistImportedMethods) { + origin = allowlistStatic.origin; painlessLookupBuilder.addImportedPainlessMethod( - whitelist.classLoader, - whitelistStatic.augmentedCanonicalClassName, - whitelistStatic.methodName, - whitelistStatic.returnCanonicalTypeName, - whitelistStatic.canonicalTypeNameParameters, - whitelistStatic.painlessAnnotations + allowlist.classLoader, + allowlistStatic.augmentedCanonicalClassName, + allowlistStatic.methodName, + allowlistStatic.returnCanonicalTypeName, + allowlistStatic.canonicalTypeNameParameters, + allowlistStatic.painlessAnnotations ); } - for (WhitelistClassBinding whitelistClassBinding : whitelist.whitelistClassBindings) { - origin = whitelistClassBinding.origin; + for (WhitelistClassBinding allowlistClassBinding : allowlist.whitelistClassBindings) { + origin = allowlistClassBinding.origin; painlessLookupBuilder.addPainlessClassBinding( - whitelist.classLoader, - whitelistClassBinding.targetJavaClassName, - whitelistClassBinding.methodName, 
- whitelistClassBinding.returnCanonicalTypeName, - whitelistClassBinding.canonicalTypeNameParameters, - whitelistClassBinding.painlessAnnotations + allowlist.classLoader, + allowlistClassBinding.targetJavaClassName, + allowlistClassBinding.methodName, + allowlistClassBinding.returnCanonicalTypeName, + allowlistClassBinding.canonicalTypeNameParameters, + allowlistClassBinding.painlessAnnotations ); } - for (WhitelistInstanceBinding whitelistInstanceBinding : whitelist.whitelistInstanceBindings) { - origin = whitelistInstanceBinding.origin; + for (WhitelistInstanceBinding allowlistInstanceBinding : allowlist.whitelistInstanceBindings) { + origin = allowlistInstanceBinding.origin; painlessLookupBuilder.addPainlessInstanceBinding( - whitelistInstanceBinding.targetInstance, - whitelistInstanceBinding.methodName, - whitelistInstanceBinding.returnCanonicalTypeName, - whitelistInstanceBinding.canonicalTypeNameParameters + allowlistInstanceBinding.targetInstance, + allowlistInstanceBinding.methodName, + allowlistInstanceBinding.returnCanonicalTypeName, + allowlistInstanceBinding.canonicalTypeNameParameters ); } } } catch (Exception exception) { - throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); + throw new IllegalArgumentException("error loading allowlist(s) " + origin, exception); } return painlessLookupBuilder.build(); } // javaClassNamesToClasses is all the classes that need to be available to the custom classloader - // including classes used as part of imported methods and class bindings but not necessarily whitelisted + // including classes used as part of imported methods and class bindings but not necessarily allowlisted // individually. The values of javaClassNamesToClasses are a superset of the values of // canonicalClassNamesToClasses. 
private final Map> javaClassNamesToClasses; - // canonicalClassNamesToClasses is all the whitelisted classes available in a Painless script including + // canonicalClassNamesToClasses is all the allowlisted classes available in a Painless script including // classes with imported canonical names but does not include classes from imported methods or class - // bindings unless also whitelisted separately. The values of canonicalClassNamesToClasses are a subset + // bindings unless also allowlisted separately. The values of canonicalClassNamesToClasses are a subset // of the values of javaClassNamesToClasses. private final Map> canonicalClassNamesToClasses; private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; @@ -2060,7 +2060,7 @@ private void setFunctionalInterfaceMethod(Class targetClass, PainlessClassBui /** * Creates a {@link Map} of PainlessMethodKeys to {@link PainlessMethod}s per {@link PainlessClass} stored as * {@link PainlessClass#runtimeMethods} identical to {@link PainlessClass#methods} with the exception of generated - * bridge methods. A generated bridge method is created for each whitelisted method that has at least one parameter + * bridge methods. A generated bridge method is created for each allowlisted method that has at least one parameter * with a boxed type to cast from other numeric primitive/boxed types in a symmetric was not handled by * {@link MethodHandle#asType(MethodType)}. As an example {@link MethodHandle#asType(MethodType)} legally casts * from {@link Integer} to long but not from int to {@link Long}. Generated bridge methods cover the latter case. 
diff --git a/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy b/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy index d1e2f88bb166a..ccfd6ba70dd16 100644 --- a/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-painless/src/main/plugin-metadata/plugin-security.policy @@ -34,6 +34,6 @@ grant { // needed to generate runtime classes permission java.lang.RuntimePermission "createClassLoader"; - // needed to find the classloader to load whitelisted classes from + // needed to find the classloader to load allowlisted classes from permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt index ab12664824be8..05b3e4fa83cc1 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/java.util.regex.txt @@ -58,7 +58,7 @@ class java.util.regex.Matcher { String replaceFirst(String) boolean requireEnd() Matcher reset() - # Note: Do not whitelist Matcher.reset(String), it subverts regex limiting + # Note: Do not allowlist Matcher.reset(String), it subverts regex limiting int start() int start(int) Matcher useAnchoringBounds(boolean) diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt index ed75c44ce9ffc..e314934ed4e56 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.aggs.movfn.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist for the Moving Function pipeline aggregator in core +# This file contains an allowlist for the Moving Function pipeline aggregator in core class org.opensearch.search.aggregations.pipeline.MovingFunctions { double max(double[]) diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt index 36f3c8e418dd6..cddb8e5f0aa7e 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.ingest.txt @@ -17,7 +17,7 @@ # under the License. # -# This file contains a whitelist for the ingest scripts +# This file contains an allowlist for the ingest scripts class java.lang.String { String org.opensearch.painless.api.Augmentation sha1() diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt index 9c8b8fd0d2cb5..cca7e07a95388 100644 --- a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.score.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist for functions to be used in Score context +# This file contains an allowlist for functions to be used in Score context class org.opensearch.script.ScoreScript @no_import { } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java index d5cd3205b315c..98b0cad9960f8 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java @@ -53,9 +53,9 @@ public class AugmentationTests extends ScriptTestCase { @BeforeClass public static void beforeClass() { Map, List> contexts = newDefaultContexts(); - List digestWhitelist = new ArrayList<>(Whitelist.BASE_WHITELISTS); - digestWhitelist.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); - contexts.put(DigestTestScript.CONTEXT, digestWhitelist); + List digestAllowlist = new ArrayList<>(Whitelist.BASE_WHITELISTS); + digestAllowlist.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.ingest.txt")); + contexts.put(DigestTestScript.CONTEXT, digestAllowlist); SCRIPT_ENGINE = new PainlessScriptEngine(Settings.EMPTY, contexts); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java index 65f277741cc81..e5113d93677ab 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java @@ -51,8 +51,8 @@ public class BindingsTests extends ScriptTestCase { @BeforeClass public static void beforeClass() { Map, List> contexts = newDefaultContexts(); - List whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); - 
whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); + List allowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + allowlists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); WhitelistInstanceBinding getter = new WhitelistInstanceBinding( @@ -74,16 +74,16 @@ public static void beforeClass() { List instanceBindingsList = new ArrayList<>(); instanceBindingsList.add(getter); instanceBindingsList.add(setter); - Whitelist instanceBindingsWhitelist = new Whitelist( + Whitelist instanceBindingsAllowlist = new Whitelist( instanceBindingTestClass.getClass().getClassLoader(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), instanceBindingsList ); - whitelists.add(instanceBindingsWhitelist); + allowlists.add(instanceBindingsAllowlist); - contexts.put(BindingsTestScript.CONTEXT, whitelists); + contexts.put(BindingsTestScript.CONTEXT, allowlists); SCRIPT_ENGINE = new PainlessScriptEngine(Settings.EMPTY, contexts); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java index 1bc6597b584e4..bf9ad76fc6ceb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.function.Function; -/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ +/** Currently just a dummy class for testing a few features not yet exposed by allowlist! 
*/ public class FeatureTestObject { /** static method that returns true */ public static boolean overloadedStatic() { @@ -47,7 +47,7 @@ public static boolean overloadedStatic(boolean whatToReturn) { return whatToReturn; } - /** static method only whitelisted as a static */ + /** static method only allowlisted as a static */ public static float staticAddFloatsTest(float x, float y) { return x + y; } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java index 9fb0610bc94af..22216e5fb4188 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java @@ -32,7 +32,7 @@ package org.opensearch.painless; -/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ +/** Currently just a dummy class for testing a few features not yet exposed by allowlist! 
*/ public class FeatureTestObject2 { public FeatureTestObject2() { super(); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java index cb8296a3f233d..8c1f545efcf7a 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java @@ -172,7 +172,7 @@ public void testNamedGroup() { assertEquals("o", exec("Matcher m = /(?f)(?o)o/.matcher('foo'); m.find(); return m.namedGroup('second')")); } - // Make sure some methods on Pattern are whitelisted + // Make sure some methods on Pattern are allowlisted public void testSplit() { assertArrayEquals(new String[] { "cat", "dog" }, (String[]) exec("/,/.split('cat,dog')")); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java index 488c01c6d1a59..a30aa97d33461 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java @@ -62,9 +62,9 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { /** Creates a new contexts map with PainlessTextScript = org.opensearch.painless.test */ protected static Map, List> newDefaultContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); - whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); - contexts.put(PainlessTestScript.CONTEXT, whitelists); + List allowlists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + allowlists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); + contexts.put(PainlessTestScript.CONTEXT, allowlists); return contexts; } diff --git 
a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java index d0041b22929e1..fb8d2eccfa043 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java @@ -206,13 +206,10 @@ public void testDynamicListWrongIndex() { * the parser with right-curly brackets to allow statements to be delimited by them at the end of blocks. */ public void testRCurlyNotDelim() { - IllegalArgumentException e = expectScriptThrows( - IllegalArgumentException.class, - () -> { - // We don't want PICKY here so we get the normal error message - exec("def i = 1} return 1", emptyMap(), emptyMap(), false); - } - ); + IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { + // We don't want PICKY here so we get the normal error message + exec("def i = 1} return 1", emptyMap(), emptyMap(), false); + }); assertEquals("unexpected token ['}'] was expecting one of [{, ';'}].", e.getMessage()); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java index 1f5b252cb74a4..e4e754a541414 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java @@ -70,51 +70,51 @@ public void testUnknownAnnotations() { public void testAnnotations() { Map parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS); parsers.put(AnnotationTestObject.TestAnnotation.NAME, AnnotationTestObject.TestAnnotationParser.INSTANCE); - Whitelist whitelist = WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation"); + Whitelist 
allowlist = WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation"); - assertEquals(1, whitelist.whitelistClasses.size()); + assertEquals(1, allowlist.whitelistClasses.size()); - WhitelistClass whitelistClass = whitelist.whitelistClasses.get(0); + WhitelistClass allowlistClass = allowlist.whitelistClasses.get(0); - assertNotNull(whitelistClass.painlessAnnotations.get(NoImportAnnotation.class)); - assertEquals(1, whitelistClass.painlessAnnotations.size()); - assertEquals(3, whitelistClass.whitelistMethods.size()); + assertNotNull(allowlistClass.painlessAnnotations.get(NoImportAnnotation.class)); + assertEquals(1, allowlistClass.painlessAnnotations.size()); + assertEquals(3, allowlistClass.whitelistMethods.size()); int count = 0; - for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { - if ("deprecatedMethod".equals(whitelistMethod.methodName)) { + for (WhitelistMethod allowlistMethod : allowlistClass.whitelistMethods) { + if ("deprecatedMethod".equals(allowlistMethod.methodName)) { assertEquals( "use another method", - ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ((DeprecatedAnnotation) allowlistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() ); - assertEquals(1, whitelistMethod.painlessAnnotations.size()); + assertEquals(1, allowlistMethod.painlessAnnotations.size()); ++count; } - if ("annotatedTestMethod".equals(whitelistMethod.methodName)) { - AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + if ("annotatedTestMethod".equals(allowlistMethod.methodName)) { + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) allowlistMethod.painlessAnnotations.get( AnnotationTestObject.TestAnnotation.class )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); - 
assertEquals(1, whitelistMethod.painlessAnnotations.size()); + assertEquals(1, allowlistMethod.painlessAnnotations.size()); ++count; } - if ("annotatedMultipleMethod".equals(whitelistMethod.methodName)) { + if ("annotatedMultipleMethod".equals(allowlistMethod.methodName)) { assertEquals( "test", - ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ((DeprecatedAnnotation) allowlistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() ); - AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) allowlistMethod.painlessAnnotations.get( AnnotationTestObject.TestAnnotation.class )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); - assertEquals(2, whitelistMethod.painlessAnnotations.size()); + assertEquals(2, allowlistMethod.painlessAnnotations.size()); ++count; } } diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation index 35808a46a90a2..897fb9c7aff9c 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation @@ -1,4 +1,4 @@ -# whitelist for annotation tests +# allowlist for annotation tests class org.opensearch.painless.AnnotationTestObject @no_import { void deprecatedMethod() @deprecated[message="use another method"] diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown index 
386a0bfd7acf7..c5bb17cc42cb2 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown @@ -1,4 +1,4 @@ -# whitelist for annotation tests with unknown annotation +# allowlist for annotation tests with unknown annotation class org.opensearch.painless.AnnotationTestObject @no_import { void unknownAnnotationMethod() @unknownAnnotation diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options index 616776d5ed3e0..d6d96cc6cfc16 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.annotation.unknown_with_options @@ -1,4 +1,4 @@ -# whitelist for annotation tests with unknown annotation containing options +# allowlist for annotation tests with unknown annotation containing options class org.opensearch.painless.AnnotationTestObject @no_import { void unknownAnnotationMethod() @unknownAnootationWithMessage[arg="arg value"] diff --git a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test index d232904057000..5345f7fab8794 100644 --- a/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test +++ b/modules/lang-painless/src/test/resources/org/opensearch/painless/spi/org.opensearch.painless.test @@ -1,4 +1,4 @@ -# whitelist for tests +# allowlist for tests # TODO: remove this when the transition from Joda to Java datetimes is 
completed class org.opensearch.script.JodaCompatibleZonedDateTime { diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index fd5c89b490d39..cb118ed9d562f 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -21,7 +21,6 @@ - match: { _index: test_1 } - match: { _id: "1" } - - match: { _type: _doc } - match: { _version: 2 } - do: @@ -43,7 +42,6 @@ - match: { _index: test_1 } - match: { _id: "1" } - - match: { _type: _doc } - match: { _version: 3 } - do: @@ -65,7 +63,6 @@ - match: { _index: test_1 } - match: { _id: "1" } - - match: { _type: _doc } - match: { _version: 4 } - do: @@ -89,7 +86,6 @@ - match: { _index: test_1 } - match: { _id: "1" } - - match: { _type: _doc } - match: { _version: 5 } - do: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml index 0ce1e369cb7c5..a006fde630716 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml @@ -452,10 +452,6 @@ --- "Exception on negative score": - - skip: - version: " - 6.99.99" - reason: "check on negative scores was added from 7.0.0 on" - - do: index: index: test diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml index 50fc0eea501df..fee31fca55dd5 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml +++ 
b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -1,10 +1,6 @@ -# Sanity integration test to make sure the custom context and whitelist work for moving_fn pipeline agg +# Sanity integration test to make sure the custom context and allowlist work for moving_fn pipeline agg # setup: - - skip: - version: " - 6.3.99" - reason: "moving_fn added in 6.4.0" - - do: indices.create: index: test diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml index 495ca2131d886..d506db0cb0d3e 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml @@ -1,10 +1,4 @@ # Integration tests for ScriptScoreQuery using Painless - -setup: - - skip: - version: " - 6.99.99" - reason: "script score query was introduced in 7.0.0" - --- "Math functions": diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml index 0a6cf993e2a2e..7db3eb8b9b2aa 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/90_interval_query_filter.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: "Implemented in 7.0" - - do: indices.create: index: test diff --git a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index d400fb69f34ca..cbba425a04889 100644 --- 
a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -187,7 +187,7 @@ private void init() throws IOException { } private IndexRequestBuilder prepareIndex(String id, String... texts) throws IOException { - return client().prepareIndex("test", "test", id).setSource("foo", texts); + return client().prepareIndex("test").setId(id).setSource("foo", texts); } private SearchResponse searchById(String id) { @@ -203,7 +203,7 @@ private SearchRequestBuilder searchByNumericRange(int low, int high) { } private SearchRequestBuilder prepareSearch() { - SearchRequestBuilder request = client().prepareSearch("test").setTypes("test"); + SearchRequestBuilder request = client().prepareSearch("test"); request.addStoredField("foo.token_count"); request.addStoredField("foo.token_count_without_position_increments"); if (loadCountedFields) { diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java index ea9cc46c7707b..d9e40fac1ad0f 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/BWCTemplateTests.java @@ -58,8 +58,8 @@ public void testBeatsTemplatesBWC() throws Exception { client().admin().indices().preparePutTemplate("packetbeat").setSource(packetBeat, XContentType.JSON).get(); client().admin().indices().preparePutTemplate("filebeat").setSource(fileBeat, XContentType.JSON).get(); - client().prepareIndex("metricbeat-foo", "doc", "1").setSource("message", "foo").get(); - client().prepareIndex("packetbeat-foo", "doc", "1").setSource("message", "foo").get(); - client().prepareIndex("filebeat-foo", "doc", "1").setSource("message", "foo").get(); + 
client().prepareIndex("metricbeat-foo").setId("1").setSource("message", "foo").get(); + client().prepareIndex("packetbeat-foo").setId("1").setSource("message", "foo").get(); + client().prepareIndex("filebeat-foo").setId("1").setSource("message", "foo").get(); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java index 46e71096ba307..3161e7462d2a0 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java @@ -91,7 +91,7 @@ public void testDocumentParsingFailsOnMetaField() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject()); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)) + () -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON)) ); assertTrue( e.getCause().getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document.") diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java index b3db286d39dac..3de322b286183 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -134,7 +134,6 @@ public void testNotIndexed() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -156,7 +155,6 @@ public void testNoDocValues() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -178,7 +176,6 @@ public void testStore() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -202,7 +199,6 @@ public void testCoerce() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), XContentType.JSON @@ -222,7 +218,6 @@ public void testCoerce() throws Exception { ThrowingRunnable runnable = () -> mapper2.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), XContentType.JSON @@ -246,7 +241,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains ThrowingRunnable runnable = () -> mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON @@ -261,7 +255,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains ParsedDocument doc = mapper2.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON @@ -277,7 +270,6 @@ public void testNullValue() throws IOException { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), XContentType.JSON @@ -291,7 +283,6 @@ public void testNullValue() throws IOException { doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), XContentType.JSON diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java index 40c4fd24b0b48..b0d7bb9d2e14e 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java @@ -63,7 +63,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", "my_feature_field", "type=rank_feature", "my_negative_feature_field", diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml index 8a874d30591f6..6fea35eb21f4e 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: "The rank feature field/query was introduced in 7.0.0" - - do: indices.create: index: test diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml index f524bd93bb600..d4d5d2a360406 100644 --- 
a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: "The rank_features field was introduced in 7.0.0" - - do: indices.create: index: test diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml index ffe05097748a6..21843dad1d177 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml @@ -19,7 +19,6 @@ setup: - do: index: index: test - type: _doc id: 1 body: a_field: "quick brown fox jump lazy dog" @@ -28,7 +27,6 @@ setup: - do: index: index: test - type: _doc id: 2 body: a_field: "xylophone xylophone xylophone" @@ -41,7 +39,6 @@ setup: - do: get: index: test - type: _doc id: 1 - is_true: found diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml index 15778393959e5..58441abac8f88 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml @@ -22,7 +22,6 @@ setup: - do: index: index: test - type: _doc id: 1 body: a_field: "quick brown fox jump lazy dog" diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index 74c884c9d0e25..4e98d2aa1af08 
100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -132,7 +132,7 @@ public void testParentWithMultipleBuckets() throws Exception { TopHits topHits = childrenBucket.getAggregations().get("top_comments"); logger.info("total_hits={}", topHits.getHits().getTotalHits().value); for (SearchHit searchHit : topHits.getHits()) { - logger.info("hit= {} {} {}", searchHit.getSortValues()[0], searchHit.getType(), searchHit.getId()); + logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } } @@ -207,7 +207,7 @@ public void testWithDeletes() throws Exception { * the updates cause that. */ UpdateResponse updateResponse; - updateResponse = client().prepareUpdate(indexName, "doc", idToUpdate) + updateResponse = client().prepareUpdate(indexName, idToUpdate) .setRouting("1") .setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1) .setDetectNoop(false) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index 2972b170e07b7..ce83790c0d302 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -201,7 +201,7 @@ public void testSimpleChildQuery() throws Exception { // TEST FETCHING _parent from child SearchResponse searchResponse; - searchResponse = client().prepareSearch("test").setQuery(idsQuery("doc").addIds("c1")).get(); + searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); @@ 
-534,7 +534,7 @@ public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrCh createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); createIndexRequest("test", "child", "2", "1", "c_field", 1).get(); - client().prepareIndex("test", "doc", "3").setSource("p_field", 1).get(); + client().prepareIndex("test").setId("3").setSource("p_field", 1).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -608,7 +608,7 @@ public void testExplainUsage() throws Exception { assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); - ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId) + ExplainResponse explainResponse = client().prepareExplain("test", parentId) .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) .get(); assertThat(explainResponse.isExists(), equalTo(true)); @@ -769,7 +769,7 @@ public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Excep assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - client().prepareIndex("test", "doc") + client().prepareIndex("test") .setSource(jsonBuilder().startObject().field("text", "value").endObject()) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); @@ -801,7 +801,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { createIndexRequest("test", "child", "2", "1", "c_field", 1).get(); client().admin().indices().prepareFlush("test").get(); - client().prepareIndex("test", "doc", "3").setSource("p_field", 2).get(); + client().prepareIndex("test").setId("3").setSource("p_field", 2).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1326,7 +1326,7 @@ public void testParentChildQueriesNoParentType() throws Exception { ensureGreen(); String parentId = "p1"; - client().prepareIndex("test", "doc", 
parentId).setSource("p_field", "1").get(); + client().prepareIndex("test").setId(parentId).setSource("p_field", "1").get(); refresh(); try { diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index 913cba6950228..ed53b1643cc75 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -151,9 +151,7 @@ public void testSimpleParentChild() throws Exception { assertThat(innerHits.getTotalHits().value, equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); - assertThat(innerHits.getAt(1).getType(), equalTo("doc")); final boolean seqNoAndTerm = randomBoolean(); response = client().prepareSearch("articles") @@ -172,11 +170,8 @@ public void testSimpleParentChild() throws Exception { assertThat(innerHits.getTotalHits().value, equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); - assertThat(innerHits.getAt(1).getType(), equalTo("doc")); assertThat(innerHits.getAt(2).getId(), equalTo("c6")); - assertThat(innerHits.getAt(2).getType(), equalTo("doc")); if (seqNoAndTerm) { assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L)); @@ -301,7 +296,6 @@ public void testRandomParentChild() throws Exception { int offset2 = 0; for (int parent = 0; parent < numDocs; parent++) { SearchHit searchHit = searchResponse.getHits().getAt(parent); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent))); assertThat(searchHit.getShard(), notNullValue()); @@ -309,7 
+303,6 @@ public void testRandomParentChild() throws Exception { assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); - assertThat(innerHit.getType(), equalTo("doc")); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); assertThat(innerHit.getId(), equalTo(childId)); assertThat(innerHit.getNestedIdentity(), nullValue()); @@ -320,7 +313,6 @@ public void testRandomParentChild() throws Exception { assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); - assertThat(innerHit.getType(), equalTo("doc")); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); assertThat(innerHit.getId(), equalTo(childId)); assertThat(innerHit.getNestedIdentity(), nullValue()); @@ -376,16 +368,12 @@ public void testInnerHitsOnHasParent() throws Exception { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } @@ -430,12 +418,10 @@ public void testParentChildMultipleLayers() throws Exception { 
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); response = client().prepareSearch("articles") .setQuery( @@ -455,12 +441,10 @@ public void testParentChildMultipleLayers() throws Exception { innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); } public void testRoyals() throws Exception { @@ -613,7 +597,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception { assertHitCount(response, 1); } - public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception { + public void testNestedInnerHitWrappedInParentChildInnerhit() { assertAcked( prepareCreate("test").addMapping( "doc", @@ -646,7 +630,7 @@ public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception { assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue()); } - public void testInnerHitsWithIgnoreUnmapped() throws Exception { + public void testInnerHitsWithIgnoreUnmapped() { assertAcked( prepareCreate("index1").addMapping( "doc", @@ -660,7 +644,7 @@ public void testInnerHitsWithIgnoreUnmapped() throws Exception { assertAcked(prepareCreate("index2")); 
createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get(); createIndexRequest("index1", "child_type", "2", "1").get(); - client().prepareIndex("index2", "type", "3").setSource("key", "value").get(); + client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); SearchResponse response = client().prepareSearch("index1", "index2") @@ -676,7 +660,7 @@ public void testInnerHitsWithIgnoreUnmapped() throws Exception { assertSearchHits(response, "1", "3"); } - public void testTooHighResultWindow() throws Exception { + public void testTooHighResultWindow() { assertAcked( prepareCreate("index1").addMapping( "doc", diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java index 5c5c6b47fb806..5d6d4fb333d49 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java @@ -129,7 +129,7 @@ private IndexRequestBuilder createIndexRequest(String index, String type, String String name = type; type = "doc"; - IndexRequestBuilder indexRequestBuilder = client().prepareIndex(index, type, id); + IndexRequestBuilder indexRequestBuilder = client().prepareIndex(index).setId(id); Map joinField = new HashMap<>(); if (parentId != null) { joinField.put("name", name); diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java index 62040b3893e83..a9ac151dd3806 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java 
@@ -80,13 +80,7 @@ public void testSingleLevel() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse( - new SourceToParse( - "test", - "type", - "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), - XContentType.JSON - ) + new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON) ); assertNull(doc.rootDoc().getBinaryValue("join_field")); @@ -94,7 +88,6 @@ public void testSingleLevel() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), XContentType.JSON @@ -107,7 +100,6 @@ public void testSingleLevel() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -131,7 +123,6 @@ public void testSingleLevel() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), XContentType.JSON @@ -161,7 +152,6 @@ public void testParentIdSpecifiedAsNumber() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -181,7 +171,6 @@ public void testParentIdSpecifiedAsNumber() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -222,13 +211,7 @@ public void testMultipleLevels() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse( - new SourceToParse( - "test", - "type", - "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), - XContentType.JSON - ) + new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), 
XContentType.JSON) ); assertNull(doc.rootDoc().getBinaryValue("join_field")); @@ -236,7 +219,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), XContentType.JSON @@ -249,7 +231,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -274,7 +255,6 @@ public void testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "child").endObject()), XContentType.JSON, @@ -290,7 +270,6 @@ public void testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -311,7 +290,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "3", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -335,7 +313,6 @@ public void testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), XContentType.JSON @@ -474,7 +451,7 @@ public void testUpdateRelations() throws Exception { .endObject() ); docMapper = indexService.mapperService() - .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); assertNotNull(mapper); assertEquals("join_field", mapper.name()); @@ -501,7 +478,7 @@ public void testUpdateRelations() throws Exception { 
.endObject() ); docMapper = indexService.mapperService() - .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); assertNotNull(mapper); assertEquals("join_field", mapper.name()); diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java index e069fc23a141d..5595c98a439bf 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java @@ -290,13 +290,9 @@ public void testFromJson() throws IOException { } public void testToQueryInnerQueryType() throws IOException { - String[] searchTypes = new String[] { TYPE }; QueryShardContext shardContext = createShardContext(); - shardContext.setTypes(searchTypes); HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_DOC, new IdsQueryBuilder().addIds("id"), ScoreMode.None); Query query = hasChildQueryBuilder.toQuery(shardContext); - // verify that the context types are still the same as the ones we previously set - assertThat(shardContext.getTypes(), equalTo(searchTypes)); assertLateParsingQuery(query, CHILD_DOC, "id"); } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java index 9783cb703ade1..0f983799a6d25 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java @@ -192,13 +192,9 @@ public void testIllegalValues() throws 
IOException { } public void testToQueryInnerQueryType() throws IOException { - String[] searchTypes = new String[] { TYPE }; QueryShardContext shardContext = createShardContext(); - shardContext.setTypes(searchTypes); HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_DOC, new IdsQueryBuilder().addIds("id"), false); Query query = hasParentQueryBuilder.toQuery(shardContext); - // verify that the context types are still the same as the ones we previously set - assertThat(shardContext.getTypes(), equalTo(searchTypes)); HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_DOC, "id"); } diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml index bff5639e4d270..bb2d39fbbdd4e 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml @@ -49,35 +49,29 @@ teardown: - match: { hits.total: 6 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "3" } - match: { hits.hits.0._source.join_field.name: "child" } - match: { hits.hits.0._source.join_field.parent: "1" } - is_false: hits.hits.0.fields.join_field#child } - match: { hits.hits.1._index: "test" } - - match: { hits.hits.1._type: "_doc" } - match: { hits.hits.1._id: "4" } - match: { hits.hits.1._source.join_field.name: "child" } - match: { hits.hits.1._source.join_field.parent: "1" } - is_false: hits.hits.1.fields.join_field#child } - match: { hits.hits.2._index: "test" } - - match: { hits.hits.2._type: "_doc" } - match: { hits.hits.2._id: "5" } - match: { hits.hits.2._source.join_field.name: "child" } - match: { hits.hits.2._source.join_field.parent: "2" } - is_false: hits.hits.2.fields.join_field#child } - match: { hits.hits.3._index: "test" } - - match: { 
hits.hits.3._type: "_doc" } - match: { hits.hits.3._id: "6" } - match: { hits.hits.3._source.join_field.name: "grand_child" } - match: { hits.hits.3._source.join_field.parent: "5" } - match: { hits.hits.4._index: "test" } - - match: { hits.hits.4._type: "_doc" } - match: { hits.hits.4._id: "1" } - match: { hits.hits.4._source.join_field.name: "parent" } - is_false: hits.hits.4._source.join_field.parent - match: { hits.hits.5._index: "test" } - - match: { hits.hits.5._type: "_doc" } - match: { hits.hits.5._id: "2" } - match: { hits.hits.5._source.join_field.name: "parent" } - is_false: hits.hits.5._source.join_field.parent @@ -96,12 +90,10 @@ teardown: - match: { hits.total: 2 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "3" } - match: { hits.hits.0._source.join_field.name: "child" } - match: { hits.hits.0._source.join_field.parent: "1" } - match: { hits.hits.1._index: "test" } - - match: { hits.hits.1._type: "_doc" } - match: { hits.hits.1._id: "4" } - match: { hits.hits.1._source.join_field.name: "child" } - match: { hits.hits.1._source.join_field.parent: "1" } diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index c111590d7a2ca..37c0eb051cd55 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -104,13 +104,16 @@ public void testPercolatorQuery() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", 
matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", "3") @@ -195,13 +198,16 @@ public void testPercolatorRangeQueries() throws Exception { ) ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14))) @@ -209,13 +215,16 @@ public void testPercolatorRangeQueries() throws Exception { ) .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) .get(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) .get(); - client().prepareIndex("test", "type", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(rangeQuery("field2").from(10).to(12)).must(rangeQuery("field2").from(12).to(14))) @@ -223,13 +232,16 @@ public void 
testPercolatorRangeQueries() throws Exception { ) .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "type", "7") + client().prepareIndex("test") + .setId("7") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")).endObject()) .get(); - client().prepareIndex("test", "type", "8") + client().prepareIndex("test") + .setId("8") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")).endObject()) .get(); - client().prepareIndex("test", "type", "9") + client().prepareIndex("test") + .setId("9") .setSource( jsonBuilder().startObject() .field( @@ -240,7 +252,8 @@ public void testPercolatorRangeQueries() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type", "10") + client().prepareIndex("test") + .setId("10") .setSource( jsonBuilder().startObject() .field( @@ -315,7 +328,8 @@ public void testPercolatorGeoQueries() throws Exception { ) ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("query", geoDistanceQuery("field1").point(52.18, 4.38).distance(50, DistanceUnit.KILOMETERS)) @@ -324,7 +338,8 @@ public void testPercolatorGeoQueries() throws Exception { ) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("query", geoBoundingBoxQuery("field1").setCorners(52.3, 4.4, 52.1, 4.6)) @@ -333,7 +348,8 @@ public void testPercolatorGeoQueries() throws Exception { ) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field( @@ -367,13 +383,16 @@ public void testPercolatorQueryExistingDocument() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") 
); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", "3") @@ -382,9 +401,9 @@ public void testPercolatorQueryExistingDocument() throws Exception { ) .get(); - client().prepareIndex("test", "type", "4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); - client().prepareIndex("test", "type", "5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); - client().prepareIndex("test", "type", "6").setSource(XContentType.JSON, "id", "6", "field1", "value", "field2", "value").get(); + client().prepareIndex("test").setId("4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); + client().prepareIndex("test").setId("6").setSource(XContentType.JSON, "id", "6", "field1", "value", "field2", "value").get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc"); @@ -422,9 +441,9 @@ public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception .addMapping("type", "_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); - client().prepareIndex("test", "type", "2").setSource("{}", XContentType.JSON).get(); + 
client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc with source disabled"); @@ -443,10 +462,12 @@ public void testPercolatorSpecificQueries() throws Exception { .addMapping("type", "id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", 1).field("query", commonTermsQuery("field1", "quick brown fox")).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("id", 2) @@ -454,7 +475,8 @@ public void testPercolatorSpecificQueries() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("id", 3) @@ -469,7 +491,8 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("id", 4) @@ -489,7 +512,8 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); // doesn't match - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("id", 5) @@ -543,23 +567,28 @@ public void testPercolatorQueryWithHighlighting() throws Exception { .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", fieldMapping, "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "brown fox")).endObject()) 
.execute() .actionGet(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "lazy dog")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "3").field("query", termQuery("field1", "jumps")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource(jsonBuilder().startObject().field("id", "4").field("query", termQuery("field1", "dog")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("id", "5").field("query", termQuery("field1", "fox")).endObject()) .execute() .actionGet(); @@ -783,10 +812,12 @@ public void testTakePositionOffsetGapIntoAccount() throws Exception { .prepareCreate("test") .addMapping("type", "field", "type=text,position_increment_gap=5", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(5)).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -868,10 +899,12 @@ public void testWithMultiplePercolatorFields() throws Exception { ); // Acceptable: - client().prepareIndex("test1", "type", "1") + client().prepareIndex("test1") + .setId("1") .setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) .get(); - 
client().prepareIndex("test2", "type", "1") + client().prepareIndex("test2") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("object_field") @@ -889,7 +922,6 @@ public void testWithMultiplePercolatorFields() throws Exception { .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getType(), equalTo("type")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1")); response = client().prepareSearch() @@ -898,12 +930,12 @@ public void testWithMultiplePercolatorFields() throws Exception { .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getType(), equalTo("type")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test2")); // Unacceptable: MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - client().prepareIndex("test2", "type", "1") + client().prepareIndex("test2") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("object_field") @@ -946,7 +978,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .endObject() .endObject(); assertAcked(client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field("id", "q1") @@ -962,7 +995,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { ) .get(); // this query should never match as it doesn't use nested query: - client().prepareIndex("test", "employee", "q2") + client().prepareIndex("test") + .setId("q2") .setSource( jsonBuilder().startObject() .field("id", "q2") @@ -972,7 +1006,8 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "employee", "q3") + 
client().prepareIndex("test") + .setId("q3") .setSource(jsonBuilder().startObject().field("id", "q3").field("query", QueryBuilders.matchAllQuery()).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -1103,15 +1138,18 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { public void testPercolatorQueryViaMultiSearch() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "field1", "type=text", "query", "type=percolator")); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().must(matchQuery("field1", "b")).must(matchQuery("field1", "c"))) @@ -1119,11 +1157,13 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { ) .execute() .actionGet(); - client().prepareIndex("test", "type", "4") + client().prepareIndex("test") + .setId("4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type", "5") + client().prepareIndex("test") + .setId("5") .setSource(jsonBuilder().startObject().field("field1", "c").endObject()) .execute() .actionGet(); @@ -1217,7 +1257,7 @@ public void testDisallowExpensiveQueries() throws IOException { .addMapping("_doc", "id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") 
.setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "value")).endObject()) .get(); @@ -1266,13 +1306,13 @@ public void testWrappedWithConstantScore() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", "d", "type=date", "q", "type=percolator")); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").gt("now"))).endObject()) .execute() .actionGet(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("2") .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").lt("now"))).endObject()) .execute() diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 5e245f7082ada..87f08e2ff50fc 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -501,13 +501,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) { return rewritten; } } - GetRequest getRequest; - if (indexedDocumentType != null) { - deprecationLogger.deprecate("percolate_with_type", TYPE_DEPRECATION_MESSAGE); - getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentType, indexedDocumentId); - } else { - getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId); - } + GetRequest getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId); getRequest.preference("_local"); getRequest.routing(indexedDocumentRouting); getRequest.preference(indexedDocumentPreference); @@ -590,9 +584,9 @@ protected Query doToQuery(QueryShardContext context) throws IOException { ); } } - docMapper = mapperService.documentMapper(type); + docMapper = 
mapperService.documentMapper(); for (BytesReference document : documents) { - docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType))); + docs.add(docMapper.parse(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType))); } FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer(); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java index aaf81ad576fff..a8b0395dd84e0 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java @@ -195,7 +195,12 @@ static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context) } static RangeFieldMapper createExtractedRangeFieldBuilder(String name, RangeType rangeType, BuilderContext context) { - RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder(name, rangeType, true); + RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder( + name, + rangeType, + true, + hasIndexCreated(context.indexSettings()) ? context.indexCreatedVersion() : null + ); // For now no doc values, because in processQuery(...) 
only the Lucene range fields get added: builder.docValues(false); return builder.build(context); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java index 670aa74501f60..9b4e42d239750 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.QueryVisitor; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.document.DocumentField; -import org.opensearch.common.text.Text; import org.opensearch.search.SearchHit; import org.opensearch.search.fetch.FetchContext; import org.opensearch.search.fetch.FetchSubPhase; @@ -108,13 +107,7 @@ public void process(HitContext hit) throws IOException { int slot = (int) matchedSlot; BytesReference document = percolateQuery.getDocuments().get(slot); HitContext subContext = new HitContext( - new SearchHit( - slot, - "unknown", - new Text(hit.hit().getType()), - Collections.emptyMap(), - Collections.emptyMap() - ), + new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()), percolatorLeafReaderContext, slot, new SourceLookup() diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 871351ad5b2c2..4058548f052f8 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -209,7 +209,7 @@ public void init() throws Exception { .endObject() ); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); - fieldMapper = 
(PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); + fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType(); queries = new ArrayList<>(); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 3b0830b7e4519..12be15552652c 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -110,20 +110,14 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws docType, new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( - docType, - queryField, - "type=percolator", - aliasField, - "type=alias,path=" + queryField - ) + PutMappingRequest.buildFromSimplifiedDef(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField) ) ), MapperService.MergeReason.MAPPING_UPDATE ); mapperService.merge( docType, - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, TEXT_FIELD_NAME, "type=text"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(TEXT_FIELD_NAME, "type=text"))), MapperService.MergeReason.MAPPING_UPDATE ); } @@ -184,7 +178,6 @@ protected String[] shuffleProtectedFields() { @Override protected GetResponse executeGet(GetRequest getRequest) { assertThat(getRequest.index(), Matchers.equalTo(indexedDocumentIndex)); - assertThat(getRequest.type(), Matchers.equalTo(MapperService.SINGLE_MAPPING_NAME)); assertThat(getRequest.id(), Matchers.equalTo(indexedDocumentId)); assertThat(getRequest.routing(), Matchers.equalTo(indexedDocumentRouting)); assertThat(getRequest.preference(), 
Matchers.equalTo(indexedDocumentPreference)); @@ -193,7 +186,6 @@ protected GetResponse executeGet(GetRequest getRequest) { return new GetResponse( new GetResult( indexedDocumentIndex, - MapperService.SINGLE_MAPPING_NAME, indexedDocumentId, 0, 1, @@ -208,7 +200,6 @@ protected GetResponse executeGet(GetRequest getRequest) { return new GetResponse( new GetResult( indexedDocumentIndex, - MapperService.SINGLE_MAPPING_NAME, indexedDocumentId, UNASSIGNED_SEQ_NO, 0, @@ -341,7 +332,6 @@ public void testFromJsonWithType() throws IOException { + "\"}}" ); rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext); - assertWarnings(PercolateQueryBuilder.TYPE_DEPRECATION_MESSAGE); } public void testBothDocumentAndDocumentsSpecified() { diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java index a25ab9a2bb76f..5038e72e9be5e 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -50,7 +50,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws super.initializeAdditionalMappings(mapperService); mapperService.merge( "_doc", - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", "some_nested_object", "type=nested"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("some_nested_object", "type=nested"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 027067563103c..2c0aa593317b4 100644 --- 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -161,7 +161,6 @@ public void init() throws Exception { String mapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject("field") .field("type", "text") @@ -204,9 +203,8 @@ public void init() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { @@ -214,16 +212,18 @@ private void addQueryFieldMappings() throws Exception { String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject(fieldName) .field("type", "percolator") .endObject() .endObject() .endObject() - .endObject() ); - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(percolatorMapper), + MapperService.MergeReason.MAPPING_UPDATE + ); fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -235,7 +235,7 @@ public void testExtractTerms() throws Exception { TermQuery termQuery2 = new TermQuery(new Term("field", "term2")); bq.add(termQuery2, Occur.SHOULD); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") 
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -295,7 +295,7 @@ public void testExtractRanges() throws Exception { Query rangeQuery2 = mapperService.fieldType("number_field1").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1) @@ -354,7 +354,7 @@ public void testExtractRanges() throws Exception { public void testExtractTermsAndRanges_failed() throws Exception { addQueryFieldMappings(); TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -380,7 +380,7 @@ public void testExtractTermsAndRanges_failed() throws Exception { public void testExtractTermsAndRanges_partial() throws Exception { addQueryFieldMappings(); PhraseQuery phraseQuery = new PhraseQuery("field", "term"); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -549,11 +549,10 @@ public void testExtractTermsAndRanges_numberFields() throws Exception { public void 
testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -570,11 +569,10 @@ public void testPercolatorFieldMapper() throws Exception { // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -592,7 +590,6 @@ public void testPercolatorFieldMapper() throws Exception { .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -617,11 +614,10 @@ public void testStoringQueries() throws Exception { // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()), XContentType.JSON @@ -634,13 +630,12 @@ public void testStoringQueries() throws Exception { public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); - client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); + client().prepareIndex("remote").setId("1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new 
TermsLookup("remote", "1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -661,11 +656,10 @@ public void testQueryWithRewrite() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper("doc") + mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject() @@ -680,11 +674,10 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON @@ -693,11 +686,10 @@ public void testPercolatorFieldMapper_noQuery() throws Exception { assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc") + mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()), XContentType.JSON @@ -716,7 +708,6 @@ public void testAllowNoAdditionalSettings() throws Exception { String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject(fieldName) .field("type", "percolator") @@ -724,18 
+715,21 @@ public void testAllowNoAdditionalSettings() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(percolatorMapper), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. public void testMultiplePercolatorFields() throws Exception { - String typeName = "doc"; + String typeName = MapperService.SINGLE_MAPPING_NAME; String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -754,11 +748,10 @@ public void testMultiplePercolatorFields() throws Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName) + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject().field("query_field1", queryBuilder).field("query_field2", queryBuilder).endObject() @@ -776,7 +769,7 @@ public void testMultiplePercolatorFields() throws Exception { // percolator field can be nested under an object field, but only one query can be specified per document public void testNestedPercolatorField() throws Exception { - String typeName = "doc"; + String typeName = MapperService.SINGLE_MAPPING_NAME; String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -797,11 +790,10 @@ public void testNestedPercolatorField() throws 
Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName) + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject().startObject("object_field").field("query_field", queryBuilder).endObject().endObject() @@ -817,11 +809,10 @@ public void testNestedPercolatorField() throws Exception { BytesRef queryBuilderAsBytes = queryBuilderField.binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); - doc = mapperService.documentMapper(typeName) + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -840,11 +831,10 @@ public void testNestedPercolatorField() throws Exception { assertQueryBuilder(queryBuilderAsBytes, queryBuilder); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName) + mapperService.documentMapper() .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -948,11 +938,10 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -996,11 +985,10 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -1091,11 +1079,10 @@ public void testDuplicatedClauses() throws Exception 
{ QueryBuilder qb = boolQuery().must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1117,11 +1104,10 @@ public void testDuplicatedClauses() throws Exception { .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))) .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1146,11 +1132,10 @@ public void testDuplicatedClauses() throws Exception { .should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3"))) .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java index fa7727d46ce40..e212ad6630e9a 100644 --- 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java @@ -97,7 +97,8 @@ protected Map, Object>> pluginScripts() { public void testPercolateScriptQuery() throws IOException { client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get(); - client().prepareIndex("index", "type", "1") + client().prepareIndex("index") + .setId("1") .setSource( jsonBuilder().startObject() .field( @@ -151,7 +152,8 @@ public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false)) .addMapping("employee", mapping) ); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field( @@ -238,7 +240,8 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() mapping.endObject(); createIndex("test", client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "use_fielddata_please", Collections.emptyMap()); - client().prepareIndex("test", "employee", "q1") + client().prepareIndex("test") + .setId("q1") .setSource( jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employees", QueryBuilders.scriptQuery(script), ScoreMode.Avg)) @@ -279,7 +282,8 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() public void testMapUnmappedFieldAsText() throws IOException { Settings.Builder settings = Settings.builder().put("index.percolator.map_unmapped_fields_as_text", true); createIndex("test", settings.build(), "query", "query", "type=percolator"); - client().prepareIndex("test", "query", "1") + client().prepareIndex("test") + .setId("1") 
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) .get(); client().admin().indices().prepareRefresh().get(); @@ -310,10 +314,12 @@ public void testRangeQueriesWithNow() throws Exception { "type=percolator" ); - client().prepareIndex("test", "_doc", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from("now-1h").to("now+1h")).endObject()) .get(); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field( @@ -325,7 +331,8 @@ public void testRangeQueriesWithNow() throws Exception { .get(); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "1==1", Collections.emptyMap()); - client().prepareIndex("test", "_doc", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("query", boolQuery().filter(scriptQuery(script)).filter(rangeQuery("field2").from("now-1h").to("now+1h"))) diff --git a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml b/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml deleted file mode 100644 index 896d2d514bcb9..0000000000000 --- a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml +++ /dev/null @@ -1,96 +0,0 @@ ---- -"Test percolator basics via rest": - - - do: - indices.create: - include_type_name: true - index: queries_index - body: - mappings: - queries_type: - properties: - query: - type: percolator - foo: - type: keyword - - - do: - indices.create: - include_type_name: true - index: documents_index - body: - mappings: - documents_type: - properties: - foo: - type: keyword - - - do: - index: - index: queries_index - type: queries_type - id: test_percolator - body: - query: - match_all: {} - - - do: - index: - index: documents_index - type: documents_type - id: some_id - body: - foo: bar - - - do: 
- indices.refresh: {} - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - document: - document_type: queries_type - foo: bar - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - document_type: queries_type - document: - foo: bar - - match: { responses.0.hits.total: 1 } - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { responses.0.hits.total: 1 } diff --git a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml index 08d344687adc7..35ebb2b099139 100644 --- a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml +++ b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml @@ -1,10 +1,5 @@ --- "Test percolator basics via rest": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: queries_index @@ -74,7 +69,7 @@ percolate: field: query index: documents_index - id: some_id + id: some_id - match: { hits.total: 1 } - do: diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index 78f0002fd4730..ea80b59711b8a 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ 
b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.index.IndexNotFoundException; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.rankeval.PrecisionAtK.Detail; @@ -74,29 +73,18 @@ public void setup() { createIndex(TEST_INDEX); ensureGreen(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "1") + client().prepareIndex(TEST_INDEX) + .setId("1") .setSource("id", 1, "text", "berlin", "title", "Berlin, Germany", "population", 3670622) .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "2") - .setSource("id", 2, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "3") - .setSource("id", 3, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "4") - .setSource("id", 4, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "5") - .setSource("id", 5, "text", "amsterdam", "population", 851573) - .get(); - client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "6") - .setSource("id", 6, "text", "amsterdam", "population", 851573) - .get(); + client().prepareIndex(TEST_INDEX).setId("2").setSource("id", 2, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("3").setSource("id", 3, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("4").setSource("id", 4, "text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX).setId("5").setSource("id", 5, "text", "amsterdam", "population", 
851573).get(); + client().prepareIndex(TEST_INDEX).setId("6").setSource("id", 6, "text", "amsterdam", "population", 851573).get(); // add another index for testing closed indices etc... - client().prepareIndex("test2", MapperService.SINGLE_MAPPING_NAME, "7") - .setSource("id", 7, "text", "amsterdam", "population", 851573) - .get(); + client().prepareIndex("test2").setId("7").setSource("id", 7, "text", "amsterdam", "population", 851573).get(); refresh(); // set up an alias that can also be used in tests diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java index 7c385cd45a840..2b1c56d9bba3b 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -36,7 +36,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -44,7 +43,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchShardTarget; @@ -87,13 +85,7 @@ public void testDCGAt() { SearchHit[] hits = new SearchHit[6]; for (int i = 0; i < 6; i++) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); - hits[i] = new SearchHit( - i, - Integer.toString(i), - new Text(MapperService.SINGLE_MAPPING_NAME), - 
Collections.emptyMap(), - Collections.emptyMap() - ); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -143,13 +135,7 @@ public void testDCGAtSixMissingRatings() { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } } - hits[i] = new SearchHit( - i, - Integer.toString(i), - new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), - Collections.emptyMap() - ); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -206,13 +192,7 @@ public void testDCGAtFourMoreRatings() { // only create four hits SearchHit[] hits = new SearchHit[4]; for (int i = 0; i < 4; i++) { - hits[i] = new SearchHit( - i, - Integer.toString(i), - new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), - Collections.emptyMap() - ); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java index 4fb0089a32cb1..723a1e2202e2b 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -35,7 +35,6 @@ import 
org.opensearch.action.OriginalIndices; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -43,7 +42,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchShardTarget; @@ -130,13 +128,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva if (relevanceRatings[i] != null) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } - hits[i] = new SearchHit( - i, - Integer.toString(i), - new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), - Collections.emptyMap() - ); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java index befb9bdf371ab..2cd16c05f2a20 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java @@ -35,7 +35,6 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import 
org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -216,7 +215,7 @@ public void testXContentParsingIsNotLenient() throws IOException { private static SearchHit[] createSearchHits(int from, int to, String index) { SearchHit[] hits = new SearchHit[to + 1 - from]; for (int i = from; i <= to; i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java index ea9eadb0c9cbd..1c7a02dc27cf7 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java @@ -35,7 +35,6 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -43,7 +42,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchShardTarget; @@ -127,7 +125,7 @@ public void testIgnoreUnlabeled() { rated.add(createRatedDoc("test", "1", RELEVANT_RATING)); // add an unlabeled 
search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); - searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); + searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap()); searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); @@ -146,7 +144,7 @@ public void testIgnoreUnlabeled() { public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[5]; for (int i = 0; i < 5; i++) { - hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); @@ -283,7 +281,7 @@ private static PrecisionAtK mutate(PrecisionAtK original) { private static SearchHit[] toSearchHits(List rated, String index) { SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java index 7e81dde0cab29..3d883b373d705 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java +++ 
b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java @@ -44,14 +44,12 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentLocation; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchParseException; @@ -188,9 +186,9 @@ public void testToXContent() throws IOException { + " \"coffee_query\": {" + " \"metric_score\": 0.1," + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," - + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0}," + + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_id\":\"123\",\"_score\":1.0}," + " \"rating\":5}," - + " {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0}," + + " {\"hit\":{\"_index\":\"index\",\"_id\":\"456\",\"_score\":1.0}," + " \"rating\":null}" + " ]" + " }" @@ -210,13 +208,7 @@ public void testToXContent() throws IOException { } private static RatedSearchHit searchHit(String index, int docId, Integer rating) { - SearchHit hit = new SearchHit( - docId, - docId + "", - new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), - Collections.emptyMap() - ); + SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap()); hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? 
OptionalInt.of(rating) : OptionalInt.empty()); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java index bfc9098f59e43..555a0c95a3456 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java @@ -34,11 +34,9 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.search.SearchHit; import org.opensearch.test.OpenSearchTestCase; @@ -55,7 +53,6 @@ public static RatedSearchHit randomRatedSearchHit() { SearchHit searchHit = new SearchHit( randomIntBetween(0, 10), randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap() ); @@ -71,13 +68,7 @@ private static RatedSearchHit mutateTestItem(RatedSearchHit original) { rating = rating.isPresent() ? 
OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); break; case 1: - hit = new SearchHit( - hit.docId(), - hit.getId() + randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), - Collections.emptyMap() - ); + hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(), Collections.emptyMap()); break; default: throw new IllegalStateException("The test should only allow two parameters mutated"); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java index 6ffaaa8e89e7b..6efb44a3875e1 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java @@ -35,7 +35,6 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -128,7 +127,7 @@ public void testNoRatedDocs() throws Exception { int k = 5; SearchHit[] hits = new SearchHit[k]; for (int i = 0; i < k; i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } @@ -252,7 +251,7 @@ private static RecallAtK mutate(RecallAtK original) { private static SearchHit[] toSearchHits(List rated, String index) { SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), 
Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml index 382b0789ba0ec..2ad583e03caaa 100644 --- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml +++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/10_basic.yml @@ -40,11 +40,6 @@ setup: --- "Response format": - - - skip: - version: " - 6.2.99" - reason: response format was updated in 6.3 - - do: rank_eval: index: foo, @@ -121,11 +116,6 @@ setup: --- "Mean Reciprocal Rank": - - - skip: - version: " - 6.2.99" - reason: response format was updated in 6.3 - - do: rank_eval: body: { @@ -160,11 +150,6 @@ setup: --- "Expected Reciprocal Rank": - - - skip: - version: " - 6.3.99" - reason: ERR was introduced in 6.4 - - do: rank_eval: body: { diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml index 90094baabb9db..82005efcebe18 100644 --- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml +++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/20_dcg.yml @@ -1,10 +1,5 @@ --- "Response format": - - - skip: - version: " - 6.1.99" - reason: the ranking evaluation feature is available since 6.2 - - do: index: index: foo diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml index b9f55ed12ad7e..c88a769b8687b 100644 --- 
a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml +++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/30_failures.yml @@ -1,10 +1,5 @@ --- "Response format": - - - skip: - version: " - 6.2.99" - reason: response format was updated in 6.3 - - do: index: index: foo diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml index 57d5aa5642ef6..08897e17ef900 100644 --- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml +++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml @@ -48,11 +48,6 @@ setup: --- "Basic rank-eval request with template": - - - skip: - version: " - 6.1.99" - reason: the ranking evaluation feature is available since 6.2 - - do: rank_eval: body: { diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 6639e799cdd5d..37526a924da73 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -49,8 +49,8 @@ testClusters.all { // Modules who's integration is explicitly tested in integration tests module ':modules:parent-join' module ':modules:lang-painless' - // Whitelist reindexing from the local node so we can test reindex-from-remote. - setting 'reindex.remote.whitelist', '127.0.0.1:*' + // Allowlist reindexing from the local node so we can test reindex-from-remote. 
+ setting 'reindex.remote.allowlist', '127.0.0.1:*' } test { diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java index b19de5150dfe8..6d313e06263b3 100644 --- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java @@ -114,7 +114,7 @@ public void testUpdateByQuery() { Client client = client(); client.admin().indices().prepareCreate("foo").get(); client.admin().indices().prepareCreate("bar").get(); - client.admin().indices().preparePutMapping(INDEX_NAME).setType("_doc").setSource("cat", "type=keyword").get(); + client.admin().indices().preparePutMapping(INDEX_NAME).setSource("cat", "type=keyword").get(); { // tag::update-by-query UpdateByQueryRequestBuilder updateByQuery = @@ -302,7 +302,7 @@ private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception { false, true, IntStream.range(0, numDocs) - .mapToObj(i -> client().prepareIndex(INDEX_NAME, "_doc", Integer.toString(i)).setSource("n", Integer.toString(i))) + .mapToObj(i -> client().prepareIndex(INDEX_NAME).setId(Integer.toString(i)).setSource("n", Integer.toString(i))) .collect(Collectors.toList()) ); @@ -311,7 +311,7 @@ private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception { assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); ReindexRequestBuilder builder = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source(INDEX_NAME) - .destination("target_index", "_doc"); + .destination("target_index"); // Scroll by 1 so that cancellation is easier to control builder.source().setSize(1); @@ -342,16 +342,16 @@ public static class BlockingOperationListener implements IndexingOperationListen @Override public Engine.Index 
preIndex(ShardId shardId, Engine.Index index) { - return preCheck(index, index.type()); + return preCheck(index); } @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { - return preCheck(delete, delete.type()); + return preCheck(delete); } - private T preCheck(T operation, String type) { - if (("_doc".equals(type) == false) || (operation.origin() != Engine.Operation.Origin.PRIMARY)) { + private T preCheck(T operation) { + if ((operation.origin() != Engine.Operation.Origin.PRIMARY)) { return operation; } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 07d67290d8f2f..43adffc6f7671 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -60,7 +60,6 @@ import org.opensearch.index.mapper.IndexFieldMapper; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; import org.opensearch.index.reindex.ScrollableHitSource.SearchFailure; import org.opensearch.script.Script; @@ -249,7 +248,7 @@ protected boolean accept(ScrollableHitSource.Hit doc) { * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source. * Thus the error message assumes that it wasn't stored. 
*/ - throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getType() + "][" + doc.getId() + "] didn't store _source"); + throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getId() + "] didn't store _source"); } return true; } @@ -597,10 +596,6 @@ public interface RequestWrapper> { String getIndex(); - void setType(String type); - - String getType(); - void setId(String id); String getId(); @@ -643,16 +638,6 @@ public String getIndex() { return request.index(); } - @Override - public void setType(String type) { - request.type(type); - } - - @Override - public String getType() { - return request.type(); - } - @Override public void setId(String id) { request.id(id); @@ -732,16 +717,6 @@ public String getIndex() { return request.index(); } - @Override - public void setType(String type) { - request.type(type); - } - - @Override - public String getType() { - return request.type(); - } - @Override public void setId(String id) { request.id(id); @@ -831,7 +806,6 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi Map context = new HashMap<>(); context.put(IndexFieldMapper.NAME, doc.getIndex()); - context.put(TypeFieldMapper.NAME, doc.getType()); context.put(IdFieldMapper.NAME, doc.getId()); Long oldVersion = doc.getVersion(); context.put(VersionFieldMapper.NAME, oldVersion); @@ -861,10 +835,6 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi if (false == doc.getIndex().equals(newValue)) { scriptChangedIndex(request, newValue); } - newValue = context.remove(TypeFieldMapper.NAME); - if (false == doc.getType().equals(newValue)) { - scriptChangedType(request, newValue); - } newValue = context.remove(IdFieldMapper.NAME); if (false == doc.getId().equals(newValue)) { scriptChangedId(request, newValue); @@ -899,7 +869,7 @@ protected RequestWrapper scriptChangedOpType(RequestWrapper request, OpTyp taskWorker.countNoop(); return null; case DELETE: - RequestWrapper delete = wrap(new 
DeleteRequest(request.getIndex(), request.getType(), request.getId())); + RequestWrapper delete = wrap(new DeleteRequest(request.getIndex(), request.getId())); delete.setVersion(request.getVersion()); delete.setVersionType(VersionType.INTERNAL); delete.setRouting(request.getRouting()); @@ -911,8 +881,6 @@ protected RequestWrapper scriptChangedOpType(RequestWrapper request, OpTyp protected abstract void scriptChangedIndex(RequestWrapper request, Object to); - protected abstract void scriptChangedType(RequestWrapper request, Object to); - protected abstract void scriptChangedId(RequestWrapper request, Object to); protected abstract void scriptChangedVersion(RequestWrapper request, Object to); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java index ac1a7c22a4d2f..1a9ce16acc255 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java @@ -67,7 +67,6 @@ protected boolean accept(ScrollableHitSource.Hit doc) { protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { DeleteRequest delete = new DeleteRequest(); delete.index(doc.getIndex()); - delete.type(doc.getType()); delete.id(doc.getId()); delete.setIfSeqNo(doc.getSeqNo()); delete.setIfPrimaryTerm(doc.getPrimaryTerm()); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java index 04619efb43c6c..865ae26f6f54d 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java @@ -133,6 +133,7 @@ public Collection createComponents( public List> getSettings() { final List> settings = new ArrayList<>(); 
settings.add(TransportReindexAction.REMOTE_CLUSTER_WHITELIST); + settings.add(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST); settings.addAll(ReindexSslConfig.getSettings()); return settings; } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java index 671827b0164a9..71c3aad8713e1 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java @@ -59,7 +59,7 @@ class ReindexValidator { static final String SORT_DEPRECATED_MESSAGE = "The sort option in reindex is deprecated. " + "Instead consider using query filtering to find the desired subset of data."; - private final CharacterRunAutomaton remoteWhitelist; + private final CharacterRunAutomaton remoteAllowlist; private final ClusterService clusterService; private final IndexNameExpressionResolver resolver; private final AutoCreateIndex autoCreateIndex; @@ -70,14 +70,14 @@ class ReindexValidator { IndexNameExpressionResolver resolver, AutoCreateIndex autoCreateIndex ) { - this.remoteWhitelist = buildRemoteWhitelist(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings)); + this.remoteAllowlist = buildRemoteAllowlist(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings)); this.clusterService = clusterService; this.resolver = resolver; this.autoCreateIndex = autoCreateIndex; } void initialValidation(ReindexRequest request) { - checkRemoteWhitelist(remoteWhitelist, request.getRemoteInfo()); + checkRemoteAllowlist(remoteAllowlist, request.getRemoteInfo()); ClusterState state = clusterService.state(); validateAgainstAliases( request.getSearchRequest(), @@ -93,32 +93,32 @@ void initialValidation(ReindexRequest request) { } } - static void checkRemoteWhitelist(CharacterRunAutomaton whitelist, RemoteInfo remoteInfo) { + static void 
checkRemoteAllowlist(CharacterRunAutomaton allowlist, RemoteInfo remoteInfo) { if (remoteInfo == null) { return; } String check = remoteInfo.getHost() + ':' + remoteInfo.getPort(); - if (whitelist.run(check)) { + if (allowlist.run(check)) { return; } - String whiteListKey = TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(); - throw new IllegalArgumentException('[' + check + "] not whitelisted in " + whiteListKey); + String allowListKey = TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(); + throw new IllegalArgumentException('[' + check + "] not allowlisted in " + allowListKey); } /** - * Build the {@link CharacterRunAutomaton} that represents the reindex-from-remote whitelist and make sure that it doesn't whitelist + * Build the {@link CharacterRunAutomaton} that represents the reindex-from-remote allowlist and make sure that it doesn't allowlist * the world. */ - static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { - if (whitelist.isEmpty()) { + static CharacterRunAutomaton buildRemoteAllowlist(List allowlist) { + if (allowlist.isEmpty()) { return new CharacterRunAutomaton(Automata.makeEmpty()); } - Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); + Automaton automaton = Regex.simpleMatchToAutomaton(allowlist.toArray(Strings.EMPTY_ARRAY)); automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( - "Refusing to start because whitelist " - + whitelist + "Refusing to start because allowlist " + + allowlist + " accepts all addresses. " + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " + "for them." 
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java index 0037e1d06a115..8ade055d10f60 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java @@ -352,13 +352,6 @@ protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) // Copy the index from the request so we always write where it asked to write index.index(mainRequest.getDestination().index()); - // If the request override's type then the user wants all documents in that type. Otherwise keep the doc's type. - if (mainRequest.getDestination().type() == null) { - index.type(doc.getType()); - } else { - index.type(mainRequest.getDestination().type()); - } - /* * Internal versioning can just use what we copied from the destination request. Otherwise we assume we're using external * versioning and use the doc's version. 
@@ -460,12 +453,6 @@ protected void scriptChangedIndex(RequestWrapper request, Object to) { request.setIndex(to.toString()); } - @Override - protected void scriptChangedType(RequestWrapper request, Object to) { - requireNonNull(to, "Can't reindex without a destination type!"); - request.setType(to.toString()); - } - @Override protected void scriptChangedId(RequestWrapper request, Object to) { request.setId(Objects.toString(to, null)); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java index aea72e694a637..6f2e5d8e71edb 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java @@ -54,7 +54,7 @@ public RestDeleteByQueryAction() { @Override public List routes() { - return unmodifiableList(asList(new Route(POST, "/{index}/_delete_by_query"), new Route(POST, "/{index}/{type}/_delete_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_delete_by_query"))); } @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java index d38cb47fc8398..9be1687a09432 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java @@ -55,7 +55,7 @@ public RestUpdateByQueryAction() { @Override public List routes() { - return unmodifiableList(asList(new Route(POST, "/{index}/_update_by_query"), new Route(POST, "/{index}/{type}/_update_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_update_by_query"))); } @Override diff --git 
a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java index a24c2b002b759..c84d103a2ef6f 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java @@ -56,10 +56,19 @@ import static java.util.Collections.emptyList; public class TransportReindexAction extends HandledTransportAction { - public static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( + static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( "reindex.remote.whitelist", emptyList(), Function.identity(), + Property.NodeScope, + Property.Deprecated + ); + // The setting below is going to replace the above. + // To keep backwards compatibility, the old usage is remained, and it's also used as the fallback for the new usage. + public static final Setting> REMOTE_CLUSTER_ALLOWLIST = Setting.listSetting( + "reindex.remote.allowlist", + REMOTE_CLUSTER_WHITELIST, + Function.identity(), Property.NodeScope ); public static Optional remoteExtension = Optional.empty(); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java index 25fd1a250d362..f07915b9d9e76 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java @@ -46,7 +46,6 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IndexFieldMapper; import org.opensearch.index.mapper.RoutingFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.script.Script; import org.opensearch.script.ScriptService; import 
org.opensearch.tasks.Task; @@ -138,7 +137,6 @@ public BiFunction, ScrollableHitSource.Hit, RequestWrapper> protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); index.index(doc.getIndex()); - index.type(doc.getType()); index.id(doc.getId()); index.source(doc.getSource(), doc.getXContentType()); index.setIfSeqNo(doc.getSeqNo()); @@ -163,11 +161,6 @@ protected void scriptChangedIndex(RequestWrapper request, Object to) { throw new IllegalArgumentException("Modifying [" + IndexFieldMapper.NAME + "] not allowed"); } - @Override - protected void scriptChangedType(RequestWrapper request, Object to) { - throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed"); - } - @Override protected void scriptChangedId(RequestWrapper request, Object to) { throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java index 69ec2e8b852cb..8467fbdeacd0e 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java @@ -79,7 +79,6 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query, // It is nasty to build paths with StringBuilder but we'll be careful.... 
StringBuilder path = new StringBuilder("/"); addIndices(path, searchRequest.indices()); - addTypes(path, searchRequest.types()); path.append("_search"); Request request = new Request("POST", path.toString()); @@ -210,16 +209,6 @@ private static String encodeIndex(String s) { } } - private static void addTypes(StringBuilder path, String[] types) { - if (types == null || types.length == 0) { - return; - } - for (String indexOrType : types) { - checkIndexOrType("Type", indexOrType); - } - path.append(Strings.arrayToCommaDelimitedString(types)).append('/'); - } - private static void checkIndexOrType(String name, String indexOrType) { if (indexOrType.indexOf(',') >= 0) { throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]"); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java index 4c57872462f0b..d22b995036e90 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java @@ -75,14 +75,12 @@ private RemoteResponseParsers() {} public static final ConstructingObjectParser HIT_PARSER = new ConstructingObjectParser<>("hit", true, a -> { int i = 0; String index = (String) a[i++]; - String type = (String) a[i++]; String id = (String) a[i++]; Long version = (Long) a[i++]; - return new BasicHit(index, type, id, version == null ? -1 : version); + return new BasicHit(index, id, version == null ? 
-1 : version); }); static { HIT_PARSER.declareString(constructorArg(), new ParseField("_index")); - HIT_PARSER.declareString(constructorArg(), new ParseField("_type")); HIT_PARSER.declareString(constructorArg(), new ParseField("_id")); HIT_PARSER.declareLong(optionalConstructorArg(), new ParseField("_version")); HIT_PARSER.declareObject(((basicHit, tuple) -> basicHit.setSource(tuple.v1(), tuple.v2())), (p, s) -> { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java index a71381d968ca8..003f3b0824602 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java @@ -37,7 +37,7 @@ public abstract class AbstractAsyncBulkByScrollActionMetadataTestCase< Response extends BulkByScrollResponse> extends AbstractAsyncBulkByScrollActionTestCase { protected ScrollableHitSource.BasicHit doc() { - return new ScrollableHitSource.BasicHit("index", "type", "id", 0); + return new ScrollableHitSource.BasicHit("index", "id", 0); } protected abstract AbstractAsyncBulkByScrollAction action(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index 3c19edc89c865..671faef6c5545 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -65,8 +65,8 @@ public void setupScriptService() { @SuppressWarnings("unchecked") protected T applyScript(Consumer> scriptBody) { - 
IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); - ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); + IndexRequest index = new IndexRequest("index").id("1").source(singletonMap("foo", "bar")); + ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "id", 0); UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) { @Override public void execute() { @@ -79,11 +79,6 @@ public void execute() { return (result != null) ? (T) result.self() : null; } - public void testTypeDeprecation() { - applyScript((Map ctx) -> ctx.get("_type")); - assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); - } - public void testScriptAddingJunkToCtxIsError() { try { applyScript((Map ctx) -> ctx.put("junk", "junk")); @@ -102,16 +97,9 @@ public void testChangeSource() { assertEquals("cat", index.sourceAsMap().get("bar")); } - public void testSetOpTypeNoop() throws Exception { - assertThat(task.getStatus().getNoops(), equalTo(0L)); - assertNull(applyScript((Map ctx) -> ctx.put("op", OpType.NOOP.toString()))); - assertThat(task.getStatus().getNoops(), equalTo(1L)); - } - public void testSetOpTypeDelete() throws Exception { DeleteRequest delete = applyScript((Map ctx) -> ctx.put("op", OpType.DELETE.toString())); assertThat(delete.index(), equalTo("index")); - assertThat(delete.type(), equalTo("type")); assertThat(delete.id(), equalTo("1")); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java index 1dd758150c392..9c2e44f580628 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -73,7 +73,6 @@ import 
org.opensearch.common.CheckedConsumer; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; -import org.opensearch.common.text.Text; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException; @@ -288,7 +287,7 @@ public void testScrollResponseSetsTotal() { public void testScrollResponseBatchingBehavior() throws Exception { int maxBatches = randomIntBetween(0, 100); for (int batches = 1; batches < maxBatches; batches++) { - Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + Hit hit = new ScrollableHitSource.BasicHit("index", "id", 0); ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction(); simulateScrollResponse(action, System.nanoTime(), 0, response); @@ -315,7 +314,7 @@ public void testBulkResponseSetsLotsOfStatus() throws Exception { responses[i] = new BulkItemResponse( i, randomFrom(DocWriteRequest.OpType.values()), - new Failure(shardId.getIndexName(), "type", "id" + i, new VersionConflictEngineException(shardId, "id", "test")) + new Failure(shardId.getIndexName(), "id" + i, new VersionConflictEngineException(shardId, "id", "test")) ); continue; } @@ -342,15 +341,7 @@ public void testBulkResponseSetsLotsOfStatus() throws Exception { } final int seqNo = randomInt(20); final int primaryTerm = randomIntBetween(1, 16); - final IndexResponse response = new IndexResponse( - shardId, - "type", - "id" + i, - seqNo, - primaryTerm, - randomInt(), - createdResponse - ); + final IndexResponse response = new IndexResponse(shardId, "id" + i, seqNo, primaryTerm, randomInt(), createdResponse); responses[i] = new BulkItemResponse(i, opType, response); } assertExactlyOnce(onSuccess -> new DummyAsyncBulkByScrollAction().onBulkResponse(new 
BulkResponse(responses, 0), onSuccess)); @@ -433,7 +424,7 @@ public void testSearchTimeoutsAbortRequest() throws Exception { * Mimicks bulk indexing failures. */ public void testBulkFailuresAbortRequest() throws Exception { - Failure failure = new Failure("index", "type", "id", new RuntimeException("test")); + Failure failure = new Failure("index", "id", new RuntimeException("test")); DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction(); BulkResponse bulkResponse = new BulkResponse( new BulkItemResponse[] { new BulkItemResponse(0, DocWriteRequest.OpType.CREATE, failure) }, @@ -456,7 +447,7 @@ protected AbstractAsyncBulkByScrollAction.RequestWrapper buildRequest(Hit doc throw new RuntimeException("surprise"); } }; - ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "id", 0); hit.setSource(new BytesArray("{}"), XContentType.JSON); ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); simulateScrollResponse(action, System.nanoTime(), 0, response); @@ -541,7 +532,7 @@ protected RequestWrapper buildRequest(Hit doc) { action.start(); // create a simulated response. 
- SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits( IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), @@ -597,7 +588,7 @@ private void bulkRetryTestCase(boolean failWithRejection) throws Exception { DummyAsyncBulkByScrollAction action = new DummyActionWithoutBackoff(); BulkRequest request = new BulkRequest(); for (int i = 0; i < size + 1; i++) { - request.add(new IndexRequest("index", "type", "id" + i)); + request.add(new IndexRequest("index").id("id" + i)); } if (failWithRejection) { action.sendBulkRequest(request, Assert::fail); @@ -946,7 +937,6 @@ protected void IndexRequest index = (IndexRequest) item; response = new IndexResponse( shardId, - index.type(), index.id() == null ? "dummy_id" : index.id(), randomInt(20), randomIntBetween(1, 16), @@ -957,7 +947,6 @@ protected void UpdateRequest update = (UpdateRequest) item; response = new UpdateResponse( shardId, - update.type(), update.id(), randomNonNegativeLong(), randomIntBetween(1, Integer.MAX_VALUE), @@ -968,7 +957,6 @@ protected void DeleteRequest delete = (DeleteRequest) item; response = new DeleteResponse( shardId, - delete.type(), delete.id(), randomInt(20), randomIntBetween(1, 16), @@ -982,12 +970,7 @@ protected void responses[i] = new BulkItemResponse( i, item.opType(), - new Failure( - response.getIndex(), - response.getType(), - response.getId(), - new OpenSearchRejectedExecutionException() - ) + new Failure(response.getIndex(), response.getId(), new OpenSearchRejectedExecutionException()) ); } else { responses[i] = new BulkItemResponse(i, item.opType(), response); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java 
b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java index d2cb565547875..cd0ee066aec7f 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseTests.java @@ -80,7 +80,7 @@ public void testMergeConstructor() { List bulkFailures = frequently() ? emptyList() : IntStream.range(0, between(1, 3)) - .mapToObj(j -> new BulkItemResponse.Failure("idx", "type", "id", new Exception())) + .mapToObj(j -> new BulkItemResponse.Failure("idx", "id", new Exception())) .collect(Collectors.toList()); allBulkFailures.addAll(bulkFailures); List searchFailures = frequently() diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java index 1bab1db908ca9..bd43f05225f65 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java @@ -77,7 +77,6 @@ public class CancelTests extends ReindexTestCase { protected static final String INDEX = "reindex-cancel-index"; - protected static final String TYPE = "reindex-cancel-type"; // Semaphore used to allow & block indexing operations during the test private static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0); @@ -116,7 +115,7 @@ private void testCancel( false, true, IntStream.range(0, numDocs) - .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)) + .mapToObj(i -> client().prepareIndex().setIndex(INDEX).setId(String.valueOf(i)).setSource("n", i)) .collect(Collectors.toList()) ); @@ -247,12 +246,12 @@ public static TaskInfo findTaskToCancel(String actionName, int workerCount) { } public void testReindexCancel() throws Exception { - testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", TYPE), 
(response, total, modified) -> { + testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest"), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); refresh("dest"); - assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified); - }, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")); + assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified); + }, equalTo("reindex from [" + INDEX + "] to [dest]")); } public void testUpdateByQueryCancel() throws Exception { @@ -289,13 +288,13 @@ public void testDeleteByQueryCancel() throws Exception { public void testReindexCancelWithWorkers() throws Exception { testCancel( ReindexAction.NAME, - reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5), + reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest").setSlices(5), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); refresh("dest"); - assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified); + assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified); }, - equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]") + equalTo("reindex from [" + INDEX + "] to [dest]") ); } @@ -355,16 +354,16 @@ public static class BlockingOperationListener implements IndexingOperationListen @Override public Engine.Index preIndex(ShardId shardId, Engine.Index index) { - return preCheck(index, index.type()); + return preCheck(index); } @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { - return preCheck(delete, delete.type()); + return preCheck(delete); } - private T preCheck(T operation, String type) { - if ((TYPE.equals(type) == false) || (operation.origin() != Origin.PRIMARY)) { + private T preCheck(T 
operation) { + if ((operation.origin() != Origin.PRIMARY)) { return operation; } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java index e0c8bf604ed27..8af217e5140e1 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java @@ -47,7 +47,6 @@ import org.opensearch.client.support.AbstractClient; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; -import org.opensearch.common.text.Text; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException; import org.opensearch.search.SearchHit; @@ -183,7 +182,7 @@ public void testScrollKeepAlive() { private SearchResponse createSearchResponse() { // create a simulated response. 
- SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits( IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java index 13ca95c01b72d..21bbb02fb147c 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java @@ -74,34 +74,34 @@ protected Collection> nodePlugins() { public void testBasics() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c"), + client().prepareIndex("test").setId("5").setSource("foo", "d"), + client().prepareIndex("test").setId("6").setSource("foo", "e"), + client().prepareIndex("test").setId("7").setSource("foo", "f") ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7); + 
assertHitCount(client().prepareSearch("test").setSize(0).get(), 7); // Deletes two docs that matches "foo:a" assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2)); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 5); // Deletes the two first docs with limit by size DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true); request.source().addSort("foo.keyword", SortOrder.ASC); assertThat(request.get(), matcher().deleted(2)); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 3); // Deletes but match no docs assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0)); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 3); // Deletes all remaining docs assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3)); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); } public void testDeleteByQueryWithOneIndex() throws Exception { @@ -109,7 +109,7 @@ public void testDeleteByQueryWithOneIndex() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("fields1", 1)); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setSource("fields1", 1)); } indexRandom(true, true, true, builders); @@ -134,7 +134,7 @@ public void testDeleteByQueryWithMultipleIndices() throws Exception { for (int j = 
0; j < docs; j++) { boolean candidate = (j < candidates[i]); - builders.add(client().prepareIndex("test-" + i, "doc", String.valueOf(j)).setSource("candidate", candidate)); + builders.add(client().prepareIndex("test-" + i).setId(String.valueOf(j)).setSource("candidate", candidate)); } } indexRandom(true, true, true, builders); @@ -151,7 +151,7 @@ public void testDeleteByQueryWithMultipleIndices() throws Exception { } public void testDeleteByQueryWithMissingIndex() throws Exception { - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "a")); assertHitCount(client().prepareSearch().setSize(0).get(), 1); try { @@ -171,7 +171,7 @@ public void testDeleteByQueryWithRouting() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test", String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); } indexRandom(true, true, true, builders); @@ -199,7 +199,8 @@ public void testDeleteByMatchQuery() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { builders.add( - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRouting(randomAlphaOfLengthBetween(1, 5)) .setSource("foo", "bar") ); @@ -217,7 +218,7 @@ public void testDeleteByMatchQuery() throws Exception { } public void testDeleteByQueryWithDateMath() throws Exception { - indexRandom(true, client().prepareIndex("test", "type", "1").setSource("d", "2013-01-01")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("d", "2013-01-01")); DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h")); assertThat(delete.refresh(true).get(), 
matcher().deleted(1L)); @@ -231,7 +232,7 @@ public void testDeleteByQueryOnReadOnlyIndex() throws Exception { final int docs = randomIntBetween(1, 50); List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", 1)); + builders.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", 1)); } indexRandom(true, true, true, builders); @@ -254,7 +255,7 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { final int docs = randomIntBetween(1, 50); List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test").setId(Integer.toString(i)).setSource("field", 1)); + builders.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", 1)); } indexRandom(true, true, true, builders); @@ -311,15 +312,15 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { public void testSlices() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c"), + client().prepareIndex("test").setId("5").setSource("foo", "d"), + client().prepareIndex("test").setId("6").setSource("foo", "e"), + client().prepareIndex("test").setId("7").setSource("foo", 
"f") ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 7); int slices = randomSlices(); int expectedSlices = expectedSliceStatuses(slices, "test"); @@ -329,14 +330,14 @@ public void testSlices() throws Exception { deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(), matcher().deleted(2).slices(hasSize(expectedSlices)) ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 5); // Delete remaining docs assertThat( deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(), matcher().deleted(5).slices(hasSize(expectedSlices)) ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); } public void testMultipleSources() throws Exception { @@ -348,7 +349,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(5, 15); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, "test", Integer.toString(i)).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("foo", "a")); } } @@ -369,7 +370,7 @@ public void testMultipleSources() throws Exception { ); for (String index : docs.keySet()) { - assertHitCount(client().prepareSearch(index).setTypes("test").setSize(0).get(), 0); + assertHitCount(client().prepareSearch(index).setSize(0).get(), 0); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java index 13101cdf59461..ff765ea0d79e8 100644 --- 
a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java @@ -55,7 +55,7 @@ public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { for (int t = 0; t < threads.length; t++) { - builders.add(client().prepareIndex("test", "doc").setSource("field", t)); + builders.add(client().prepareIndex("test").setSource("field", t)); } } indexRandom(true, true, true, builders); @@ -96,7 +96,7 @@ public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("foo", "bar")); + builders.add(client().prepareIndex("test").setId(String.valueOf(i)).setSource("foo", "bar")); } indexRandom(true, true, true, builders); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java index 581cb19b0dd8b..0c660e5df9682 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java @@ -51,31 +51,31 @@ public class ReindexBasicTests extends ReindexTestCase { public void testFiltering() throws Exception { indexRandom( true, - client().prepareIndex("source", "test", "1").setSource("foo", "a"), - client().prepareIndex("source", "test", "2").setSource("foo", "a"), - client().prepareIndex("source", "test", "3").setSource("foo", "b"), - client().prepareIndex("source", "test", "4").setSource("foo", "c") + client().prepareIndex("source").setId("1").setSource("foo", "a"), + client().prepareIndex("source").setId("2").setSource("foo", "a"), + 
client().prepareIndex("source").setId("3").setSource("foo", "b"), + client().prepareIndex("source").setId("4").setSource("foo", "c") ); assertHitCount(client().prepareSearch("source").setSize(0).get(), 4); // Copy all the docs - ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true); + ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true); assertThat(copy.get(), matcher().created(4)); assertHitCount(client().prepareSearch("dest").setSize(0).get(), 4); // Now none of them createIndex("none"); - copy = reindex().source("source").destination("none", "type").filter(termQuery("foo", "no_match")).refresh(true); + copy = reindex().source("source").destination("none").filter(termQuery("foo", "no_match")).refresh(true); assertThat(copy.get(), matcher().created(0)); assertHitCount(client().prepareSearch("none").setSize(0).get(), 0); // Now half of them - copy = reindex().source("source").destination("dest_half", "type").filter(termQuery("foo", "a")).refresh(true); + copy = reindex().source("source").destination("dest_half").filter(termQuery("foo", "a")).refresh(true); assertThat(copy.get(), matcher().created(2)); assertHitCount(client().prepareSearch("dest_half").setSize(0).get(), 2); // Limit with maxDocs - copy = reindex().source("source").destination("dest_size_one", "type").maxDocs(1).refresh(true); + copy = reindex().source("source").destination("dest_size_one").maxDocs(1).refresh(true); assertThat(copy.get(), matcher().created(1)); assertHitCount(client().prepareSearch("dest_size_one").setSize(0).get(), 1); } @@ -84,14 +84,14 @@ public void testCopyMany() throws Exception { List docs = new ArrayList<>(); int max = between(150, 500); for (int i = 0; i < max; i++) { - docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("foo", "a")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", "a")); } indexRandom(true, docs); 
assertHitCount(client().prepareSearch("source").setSize(0).get(), max); // Copy all the docs - ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true); + ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); assertThat(copy.get(), matcher().created(max).batches(max, 5)); @@ -99,7 +99,7 @@ public void testCopyMany() throws Exception { // Copy some of the docs int half = max / 2; - copy = reindex().source("source").destination("dest_half", "type").refresh(true); + copy = reindex().source("source").destination("dest_half").refresh(true); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); copy.maxDocs(half); @@ -111,7 +111,7 @@ public void testCopyManyWithSlices() throws Exception { List docs = new ArrayList<>(); int max = between(150, 500); for (int i = 0; i < max; i++) { - docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("foo", "a")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", "a")); } indexRandom(true, docs); @@ -121,15 +121,15 @@ public void testCopyManyWithSlices() throws Exception { int expectedSlices = expectedSliceStatuses(slices, "source"); // Copy all the docs - ReindexRequestBuilder copy = reindex().source("source").destination("dest", "type").refresh(true).setSlices(slices); + ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true).setSlices(slices); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices))); - assertHitCount(client().prepareSearch("dest").setTypes("type").setSize(0).get(), max); + assertHitCount(client().prepareSearch("dest").setSize(0).get(), max); // Copy some of the docs int half 
= max / 2; - copy = reindex().source("source").destination("dest_half", "type").refresh(true).setSlices(slices); + copy = reindex().source("source").destination("dest_half").refresh(true).setSlices(slices); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); copy.maxDocs(half); @@ -148,7 +148,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(50, 200); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, typeName, "id_" + sourceIndex + "_" + i).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId("id_" + sourceIndex + "_" + i).setSource("foo", "a")); } } @@ -162,7 +162,7 @@ public void testMultipleSources() throws Exception { int expectedSlices = expectedSliceStatuses(slices, docs.keySet()); String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); - ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest", "type").refresh(true).setSlices(slices); + ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest").refresh(true).setSlices(slices); BulkByScrollResponse response = request.get(); assertThat(response, matcher().created(allDocs.size()).slices(hasSize(expectedSlices))); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java index db9c2779928ea..c893c5c5b180f 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java @@ -56,7 +56,7 @@ public void testFailuresCauseAbortDefault() throws Exception { * Create the destination index such that the copy will cause a mapping * conflict on every request. 
*/ - indexRandom(true, client().prepareIndex("dest", "_doc", "test").setSource("test", 10) /* Its a string in the source! */); + indexRandom(true, client().prepareIndex("dest").setId("test").setSource("test", 10) /* Its a string in the source! */); indexDocs(100); @@ -77,7 +77,7 @@ public void testFailuresCauseAbortDefault() throws Exception { public void testAbortOnVersionConflict() throws Exception { // Just put something in the way of the copy. - indexRandom(true, client().prepareIndex("dest", "_doc", "1").setSource("test", "test")); + indexRandom(true, client().prepareIndex("dest").setId("1").setSource("test", "test")); indexDocs(100); @@ -139,7 +139,7 @@ public void testResponseOnSearchFailure() throws Exception { private void indexDocs(int count) throws Exception { List docs = new ArrayList<>(count); for (int i = 0; i < count; i++) { - docs.add(client().prepareIndex("source", "_doc", Integer.toString(i)).setSource("test", "words words")); + docs.add(client().prepareIndex("source").setId(Integer.toString(i)).setSource("test", "words words")); } indexRandom(true, docs); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java index e083b877236aa..8012b67253cb6 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -44,22 +44,22 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; -import static org.opensearch.index.reindex.ReindexValidator.buildRemoteWhitelist; -import static org.opensearch.index.reindex.ReindexValidator.checkRemoteWhitelist; +import static org.opensearch.index.reindex.ReindexValidator.buildRemoteAllowlist; +import static 
org.opensearch.index.reindex.ReindexValidator.checkRemoteAllowlist; /** - * Tests the reindex-from-remote whitelist of remotes. + * Tests the reindex-from-remote allowlist of remotes. */ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { private final BytesReference query = new BytesArray("{ \"foo\" : \"bar\" }"); public void testLocalRequestWithoutWhitelist() { - checkRemoteWhitelist(buildRemoteWhitelist(emptyList()), null); + checkRemoteAllowlist(buildRemoteAllowlist(emptyList()), null); } public void testLocalRequestWithWhitelist() { - checkRemoteWhitelist(buildRemoteWhitelist(randomWhitelist()), null); + checkRemoteAllowlist(buildRemoteAllowlist(randomAllowlist()), null); } /** @@ -81,16 +81,16 @@ private RemoteInfo newRemoteInfo(String host, int port) { } public void testWhitelistedRemote() { - List whitelist = randomWhitelist(); - String[] inList = whitelist.iterator().next().split(":"); + List allowlist = randomAllowlist(); + String[] inList = allowlist.iterator().next().split(":"); String host = inList[0]; int port = Integer.valueOf(inList[1]); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo(host, port)); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo(host, port)); } public void testWhitelistedByPrefix() { - checkRemoteWhitelist( - buildRemoteWhitelist(singletonList("*.example.com:9200")), + checkRemoteAllowlist( + buildRemoteAllowlist(singletonList("*.example.com:9200")), new RemoteInfo( randomAlphaOfLength(5), "es.example.com", @@ -104,34 +104,34 @@ public void testWhitelistedByPrefix() { RemoteInfo.DEFAULT_CONNECT_TIMEOUT ) ); - checkRemoteWhitelist( - buildRemoteWhitelist(singletonList("*.example.com:9200")), + checkRemoteAllowlist( + buildRemoteAllowlist(singletonList("*.example.com:9200")), newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200) ); } public void testWhitelistedBySuffix() { - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("es.example.com:*")), 
newRemoteInfo("es.example.com", 9200)); + checkRemoteAllowlist(buildRemoteAllowlist(singletonList("es.example.com:*")), newRemoteInfo("es.example.com", 9200)); } public void testWhitelistedByInfix() { - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("es*.example.com:9200")), newRemoteInfo("es1.example.com", 9200)); + checkRemoteAllowlist(buildRemoteAllowlist(singletonList("es*.example.com:9200")), newRemoteInfo("es1.example.com", 9200)); } public void testLoopbackInWhitelistRemote() throws UnknownHostException { - List whitelist = randomWhitelist(); - whitelist.add("127.0.0.1:*"); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("127.0.0.1", 9200)); + List allowlist = randomAllowlist(); + allowlist.add("127.0.0.1:*"); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("127.0.0.1", 9200)); } public void testUnwhitelistedRemote() { int port = between(1, Integer.MAX_VALUE); - List whitelist = randomBoolean() ? randomWhitelist() : emptyList(); + List allowlist = randomBoolean() ? 
randomAllowlist() : emptyList(); Exception e = expectThrows( IllegalArgumentException.class, - () -> checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("not in list", port)) + () -> checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("not in list", port)) ); - assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); + assertEquals("[not in list:" + port + "] not allowlisted in reindex.remote.allowlist", e.getMessage()); } public void testRejectMatchAll() { @@ -140,22 +140,22 @@ public void testRejectMatchAll() { assertMatchesTooMuch(singletonList("***")); assertMatchesTooMuch(Arrays.asList("realstuff", "*")); assertMatchesTooMuch(Arrays.asList("*", "realstuff")); - List random = randomWhitelist(); + List random = randomAllowlist(); random.add("*"); assertMatchesTooMuch(random); } public void testIPv6Address() { - List whitelist = randomWhitelist(); - whitelist.add("[::1]:*"); - checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("[::1]", 9200)); + List allowlist = randomAllowlist(); + allowlist.add("[::1]:*"); + checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("[::1]", 9200)); } - private void assertMatchesTooMuch(List whitelist) { - Exception e = expectThrows(IllegalArgumentException.class, () -> buildRemoteWhitelist(whitelist)); + private void assertMatchesTooMuch(List allowlist) { + Exception e = expectThrows(IllegalArgumentException.class, () -> buildRemoteAllowlist(allowlist)); assertEquals( - "Refusing to start because whitelist " - + whitelist + "Refusing to start because allowlist " + + allowlist + " accepts all addresses. 
" + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " + "for them.", @@ -163,12 +163,12 @@ private void assertMatchesTooMuch(List whitelist) { ); } - private List randomWhitelist() { + private List randomAllowlist() { int size = between(1, 100); - List whitelist = new ArrayList<>(size); + List allowlist = new ArrayList<>(size); for (int i = 0; i < size; i++) { - whitelist.add(randomAlphaOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); + allowlist.add(randomAlphaOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); } - return whitelist; + return allowlist; } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java index a44880c645d34..8ce850a936557 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -98,15 +98,15 @@ protected boolean addMockHttpTransport() { @Override protected Settings nodeSettings() { Settings.Builder settings = Settings.builder().put(super.nodeSettings()); - // Whitelist reindexing from the http host we're going to use - settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*"); + // Allowlist reindexing from the http host we're going to use + settings.put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*"); settings.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); return settings.build(); } @Before public void setupSourceIndex() { - client().prepareIndex("source", "test").setSource("test", "test").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("source").setSource("test", "test").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } @Before diff --git 
a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java new file mode 100644 index 0000000000000..8ff84223d371e --- /dev/null +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.reindex; + +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Arrays; +import java.util.List; + +/** + * A unit test to validate the former name of the setting 'reindex.remote.allowlist' still take effect, + * after it is deprecated, so that the backwards compatibility is maintained. + * The test can be removed along with removing support of the deprecated setting. + */ +public class ReindexRenamedSettingTests extends OpenSearchTestCase { + private final ReindexPlugin plugin = new ReindexPlugin(); + + /** + * Validate the both settings are known and supported. + */ + public void testReindexSettingsExist() { + List> settings = plugin.getSettings(); + assertTrue( + "Both 'reindex.remote.allowlist' and its predecessor should be supported settings of Reindex plugin", + settings.containsAll( + Arrays.asList(TransportReindexAction.REMOTE_CLUSTER_WHITELIST, TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST) + ) + ); + } + + /** + * Validate the default value of the both settings is the same. 
+ */ + public void testSettingFallback() { + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(Settings.EMPTY), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(Settings.EMPTY) + ); + } + + /** + * Validate the new setting can be configured correctly, and it doesn't impact the old setting. + */ + public void testSettingGetValue() { + Settings settings = Settings.builder().put("reindex.remote.allowlist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getDefault(Settings.EMPTY) + ); + } + + /** + * Validate the value of the old setting will be applied to the new setting, if the new setting is not configured. + */ + public void testSettingGetValueWithFallback() { + Settings settings = Settings.builder().put("reindex.remote.whitelist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + + /** + * Validate the value of the old setting will be ignored, if the new setting is configured. 
+ */ + public void testSettingGetValueWhenBothAreConfigured() { + Settings settings = Settings.builder() + .put("reindex.remote.allowlist", "127.0.0.1:*") + .put("reindex.remote.whitelist", "[::1]:*, 127.0.0.1:*") + .build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), Arrays.asList("[::1]:*", "127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + +} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java index bd6eba132af21..85f0c3c24abee 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java @@ -60,20 +60,6 @@ public void testSettingIndexToNullIsError() throws Exception { } } - public void testSetType() throws Exception { - Object type = randomFrom(new Object[] { 234, 234L, "pancake" }); - IndexRequest index = applyScript((Map ctx) -> ctx.put("_type", type)); - assertEquals(type.toString(), index.type()); - } - - public void testSettingTypeToNullIsError() throws Exception { - try { - applyScript((Map ctx) -> ctx.put("_type", null)); - } catch (NullPointerException e) { - assertThat(e.getMessage(), containsString("Can't reindex without a destination type!")); - } - } - public void testSetId() throws Exception { Object id = randomFrom(new Object[] { null, 234, 234L, "pancake" }); IndexRequest index = applyScript((Map ctx) -> ctx.put("_id", id)); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java index 8d675916437e9..8ce9cf74bb8be 100644 --- 
a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java @@ -52,7 +52,7 @@ protected Collection> getPlugins() { public void testDeprecatedSort() { int max = between(2, 20); for (int i = 0; i < max; i++) { - client().prepareIndex("source", "_doc").setId(Integer.toString(i)).setSource("foo", i).get(); + client().prepareIndex("source").setId(Integer.toString(i)).setSource("foo", i).get(); } client().admin().indices().prepareRefresh("source").get(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java index 7181fa9f4d273..733ccc6b61127 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java @@ -127,20 +127,20 @@ private BulkByScrollResponse reindexCreate() { private void setupSourceAbsent() throws Exception { indexRandom( true, - client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source") + client().prepareIndex("source").setId("test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source") ); - assertEquals(SOURCE_VERSION, client().prepareGet("source", "_doc", "test").get().getVersion()); + assertEquals(SOURCE_VERSION, client().prepareGet("source", "test").get().getVersion()); } private void setupDest(int version) throws Exception { setupSourceAbsent(); indexRandom( true, - client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest") + client().prepareIndex("dest").setId("test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest") ); - assertEquals(version, client().prepareGet("dest", "_doc", 
"test").get().getVersion()); + assertEquals(version, client().prepareGet("dest", "test").get().getVersion()); } private void setupDestOlder() throws Exception { @@ -152,7 +152,7 @@ private void setupDestNewer() throws Exception { } private void assertDest(String fooValue, int version) { - GetResponse get = client().prepareGet("dest", "_doc", "test").get(); + GetResponse get = client().prepareGet("dest", "test").get(); assertEquals(fooValue, get.getSource().get("foo")); assertEquals(version, get.getVersion()); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java deleted file mode 100644 index 5341bcd0fee5d..0000000000000 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. 
See - * GitHub history for details. - */ - -package org.opensearch.index.reindex; - -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.search.RestSearchAction; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestDeleteByQueryActionTests extends RestActionTestCase { - private RestDeleteByQueryAction action; - - @Before - public void setUpAction() { - action = new RestDeleteByQueryAction(); - controller().registerHandler(action); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("/some_index/some_type/_delete_by_query") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. 
- DeleteByQueryRequest dbqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertArrayEquals(new String[] { "some_type" }, dbqRequest.getDocTypes()); - - // RestDeleteByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testParseEmpty() throws IOException { - final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build(); - DeleteByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java index 508cfefa1679c..aa8221b045d3f 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java @@ -38,13 +38,11 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.rest.RestRequest.Method; import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.test.rest.RestActionTestCase; import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import static java.util.Collections.singletonMap; @@ -102,52 +100,4 @@ public void testSetScrollTimeout() throws IOException { assertEquals("10m", request.getScrollTime().toString()); } } - - /** - * test deprecation is logged if one or more types are used in source search request 
inside reindex - */ - public void testTypeInSource() throws IOException { - FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/_reindex"); - XContentBuilder b = JsonXContent.contentBuilder().startObject(); - { - b.startObject("source"); - { - b.field("type", randomFrom(Arrays.asList("\"t1\"", "[\"t1\", \"t2\"]", "\"_doc\""))); - } - b.endObject(); - } - b.endObject(); - requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(requestBuilder.build()); - assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE); - } - - /** - * test deprecation is logged if a type is used in the destination index request inside reindex - */ - public void testTypeInDestination() throws IOException { - FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/_reindex"); - XContentBuilder b = JsonXContent.contentBuilder().startObject(); - { - b.startObject("dest"); - { - b.field("type", (randomBoolean() ? "_doc" : randomAlphaOfLength(4))); - } - b.endObject(); - } - b.endObject(); - requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(requestBuilder.build()); - assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE); - } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java deleted file mode 100644 index 743f0e8a852f4..0000000000000 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.reindex; - -import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.search.RestSearchAction; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestUpdateByQueryActionTests extends RestActionTestCase { - - private RestUpdateByQueryAction action; - - @Before - public void setUpAction() { - action = new RestUpdateByQueryAction(); - controller().registerHandler(action); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("/some_index/some_type/_update_by_query") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. 
- UpdateByQueryRequest ubqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertArrayEquals(new String[] { "some_type" }, ubqRequest.getDocTypes()); - - // RestUpdateByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testParseEmpty() throws IOException { - final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build(); - UpdateByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java index 3f46d621ab8d5..6bedd59515e45 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java @@ -99,7 +99,7 @@ private void testCase(AbstractBulkByScrollRequestBuilder request, String a List docs = new ArrayList<>(); for (int i = 0; i < numSlices * 10; i++) { - docs.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("foo", "bar")); + docs.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", "bar")); } indexRandom(true, docs); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java index 546f9b07e90b7..124670dba9510 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java 
@@ -102,8 +102,8 @@ protected boolean addMockHttpTransport() { final Settings nodeSettings() { return Settings.builder() - // whitelist reindexing from the HTTP host we're going to use - .put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*") + // allowlist reindexing from the HTTP host we're going to use + .put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*") .build(); } @@ -198,7 +198,7 @@ private void testCase( // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools. BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < DOC_COUNT; i++) { - bulk.add(client().prepareIndex("source", "test").setSource("foo", "bar " + i)); + bulk.add(client().prepareIndex("source").setSource("foo", "bar " + i)); } Retry retry = new Retry(BackoffPolicy.exponentialBackoff(), client().threadPool()); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java index d803eff25d081..4f48b99dccdd4 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java @@ -50,54 +50,54 @@ public class UpdateByQueryBasicTests extends ReindexTestCase { public void testBasics() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + 
client().prepareIndex("test").setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4); - assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion()); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 4); + assertEquals(1, client().prepareGet("test", "1").get().getVersion()); + assertEquals(1, client().prepareGet("test", "4").get().getVersion()); // Reindex all the docs assertThat(updateByQuery().source("test").refresh(true).get(), matcher().updated(4)); - assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(2, client().prepareGet("test", "1").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); // Now none of them assertThat(updateByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().updated(0)); - assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(2, client().prepareGet("test", "1").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); // Now half of them assertThat(updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().updated(2)); - assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(3, client().prepareGet("test", "1").get().getVersion()); + assertEquals(3, client().prepareGet("test", "2").get().getVersion()); + 
assertEquals(2, client().prepareGet("test", "3").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); // Limit with size UpdateByQueryRequestBuilder request = updateByQuery().source("test").size(3).refresh(true); request.source().addSort("foo.keyword", SortOrder.ASC); assertThat(request.get(), matcher().updated(3)); // Only the first three documents are updated because of sort - assertEquals(4, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(4, client().prepareGet("test", "test", "2").get().getVersion()); - assertEquals(3, client().prepareGet("test", "test", "3").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(4, client().prepareGet("test", "1").get().getVersion()); + assertEquals(4, client().prepareGet("test", "2").get().getVersion()); + assertEquals(3, client().prepareGet("test", "3").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); } public void testSlices() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c") + client().prepareIndex("test").setId("1").setSource("foo", "a"), + client().prepareIndex("test").setId("2").setSource("foo", "a"), + client().prepareIndex("test").setId("3").setSource("foo", "b"), + client().prepareIndex("test").setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4); - assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion()); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 4); + assertEquals(1, client().prepareGet("test", 
"1").get().getVersion()); + assertEquals(1, client().prepareGet("test", "4").get().getVersion()); int slices = randomSlices(2, 10); int expectedSlices = expectedSliceStatuses(slices, "test"); @@ -107,26 +107,26 @@ public void testSlices() throws Exception { updateByQuery().source("test").refresh(true).setSlices(slices).get(), matcher().updated(4).slices(hasSize(expectedSlices)) ); - assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(2, client().prepareGet("test", "1").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); // Now none of them assertThat( updateByQuery().source("test").filter(termQuery("foo", "no_match")).setSlices(slices).refresh(true).get(), matcher().updated(0).slices(hasSize(expectedSlices)) ); - assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(2, client().prepareGet("test", "1").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); // Now half of them assertThat( updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(), matcher().updated(2).slices(hasSize(expectedSlices)) ); - assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion()); - assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion()); - assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); + assertEquals(3, client().prepareGet("test", "1").get().getVersion()); + assertEquals(3, client().prepareGet("test", "2").get().getVersion()); + assertEquals(2, client().prepareGet("test", "3").get().getVersion()); + assertEquals(2, client().prepareGet("test", "4").get().getVersion()); } public void 
testMultipleSources() throws Exception { @@ -138,7 +138,7 @@ public void testMultipleSources() throws Exception { docs.put(indexName, new ArrayList<>()); int numDocs = between(5, 15); for (int i = 0; i < numDocs; i++) { - docs.get(indexName).add(client().prepareIndex(indexName, "test", Integer.toString(i)).setSource("foo", "a")); + docs.get(indexName).add(client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("foo", "a")); } } @@ -159,7 +159,7 @@ public void testMultipleSources() throws Exception { String index = entry.getKey(); List indexDocs = entry.getValue(); int randomDoc = between(0, indexDocs.size() - 1); - assertEquals(2, client().prepareGet(index, "test", Integer.toString(randomDoc)).get().getVersion()); + assertEquals(2, client().prepareGet(index, Integer.toString(randomDoc)).get().getVersion()); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java index 3685fc5f124c9..3c2e302cb85e7 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -55,7 +55,7 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { public void testUpdateWhileReindexing() throws Exception { AtomicReference value = new AtomicReference<>(randomSimpleString(random())); - indexRandom(true, client().prepareIndex("test", "test", "test").setSource("test", value.get())); + indexRandom(true, client().prepareIndex("test").setId("test").setSource("test", value.get())); AtomicReference failure = new AtomicReference<>(); AtomicBoolean keepUpdating = new AtomicBoolean(true); @@ -76,10 +76,11 @@ public void testUpdateWhileReindexing() throws Exception { try { for (int i = 0; i < MAX_MUTATIONS; i++) { - GetResponse get = client().prepareGet("test", 
"test", "test").get(); + GetResponse get = client().prepareGet("test", "test").get(); assertEquals(value.get(), get.getSource().get("test")); value.set(randomSimpleString(random())); - IndexRequestBuilder index = client().prepareIndex("test", "test", "test") + IndexRequestBuilder index = client().prepareIndex("test") + .setId("test") .setSource("test", value.get()) .setRefreshPolicy(IMMEDIATE); /* @@ -106,7 +107,7 @@ public void testUpdateWhileReindexing() throws Exception { get.getVersion(), attempts ); - get = client().prepareGet("test", "test", "test").get(); + get = client().prepareGet("test", "test").get(); } } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java index b72f66ce11277..ce982dcb6bd34 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -56,7 +56,7 @@ public void testModifyingCtxNotAllowed() { * error message to the user, not some ClassCastException. 
*/ Object[] options = new Object[] { "cat", new Object(), 123, new Date(), Math.PI }; - for (String ctxVar : new String[] { "_index", "_type", "_id", "_version", "_routing" }) { + for (String ctxVar : new String[] { "_index", "_id", "_version", "_routing" }) { try { applyScript((Map ctx) -> ctx.put(ctxVar, randomFrom(options))); } catch (IllegalArgumentException e) { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java index 541134f9403ba..c349bc54bcbd9 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -78,27 +78,25 @@ public void testIntialSearchPath() { SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); assertEquals("/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("a"); - searchRequest.types("b"); - assertEquals("/a/b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("a", "b"); - searchRequest.types("c", "d"); - assertEquals("/a,b/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/a,b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("cat,"); - assertEquals("/cat%2C/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/cat%2C/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("cat/"); - assertEquals("/cat%2F/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/cat%2F/_search", 
initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("cat/", "dog"); - assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/cat%2F,dog/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); // test a specific date math + all characters that need escaping. searchRequest.indices("", "<>/{}|+:,"); assertEquals( - "/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search", + "/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint() ); // pass-through if already escaped. searchRequest.indices("%2f", "%3a"); - assertEquals("/%2f,%3a/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals("/%2f,%3a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); assertWarnings(DEPRECATED_URL_ENCODED_INDEX_WARNING); @@ -107,20 +105,6 @@ public void testIntialSearchPath() { expectBadStartRequest(searchRequest, "Index", ",", "%2fcat,"); searchRequest.indices("%3ccat/"); expectBadStartRequest(searchRequest, "Index", "/", "%3ccat/"); - - searchRequest.indices("ok"); - searchRequest.types("cat,"); - expectBadStartRequest(searchRequest, "Type", ",", "cat,"); - searchRequest.types("cat,", "dog"); - expectBadStartRequest(searchRequest, "Type", ",", "cat,"); - searchRequest.types("dog", "cat,"); - expectBadStartRequest(searchRequest, "Type", ",", "cat,"); - searchRequest.types("cat/"); - expectBadStartRequest(searchRequest, "Type", "/", "cat/"); - searchRequest.types("cat/", "dog"); - expectBadStartRequest(searchRequest, "Type", "/", "cat/"); - searchRequest.types("dog", "cat/"); - expectBadStartRequest(searchRequest, "Type", "/", "cat/"); } private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) { diff --git 
a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 54cb39c736ff8..337bc67796f8e 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -178,7 +178,6 @@ public void testParseStartOk() throws Exception { assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -196,7 +195,6 @@ public void testParseScrollOk() throws Exception { assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -246,7 +244,6 @@ public void testScanJumpStart() throws Exception { assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -277,7 +274,6 @@ public void testParseRejection() throws Exception { ); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", 
r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString()); called.set(true); @@ -308,7 +304,6 @@ public void testParseFailureWithStatus() throws Exception { ); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("10000", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString()); called.set(true); diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml index c47d8ff0e0756..7783bbd1f9476 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml @@ -91,7 +91,6 @@ - skip: version: "6.7.0 - " reason: reindex moved to rely on sequence numbers for concurrency control - - do: indices.create: index: test @@ -124,7 +123,6 @@ - match: {version_conflicts: 1} - match: {batches: 1} - match: {failures.0.index: test} - - match: {failures.0.type: _doc} - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} @@ -145,10 +143,6 @@ --- "Response for version conflict (seq no powered)": - - skip: - version: " - 6.6.99" - reason: reindex moved to rely on sequence numbers for concurrency control - - do: indices.create: index: test @@ -181,7 +175,6 @@ - match: {version_conflicts: 1} - match: {batches: 1} - match: {failures.0.index: test} - - match: {failures.0.type: _doc} - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} @@ -210,7 +203,6 @@ - do: index: index: test - type: 
_doc id: 1 body: { "text": "test" } - do: @@ -219,7 +211,6 @@ - do: index: index: test - type: _doc id: 1 body: { "text": "test2" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index b06cd2325571a..15e2397099b65 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -285,7 +285,7 @@ indices.refresh: {} - do: - catch: /\[test\]\[_doc\]\[1\] didn't store _source/ + catch: /\[test\]\[1\] didn't store _source/ reindex: body: source: @@ -306,9 +306,9 @@ index: dest --- -"unwhitelisted remote host fails": +"unallowlisted remote host fails": - do: - catch: /\[badremote:9200\] not whitelisted in reindex.remote.whitelist/ + catch: /\[badremote:9200\] not allowlisted in reindex.remote.allowlist/ reindex: body: source: diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml index 770f372c210a8..9c38b13bb1ff0 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml @@ -399,9 +399,9 @@ mget: body: docs: - - { _index: index2, _type: _doc, _id: en_123} - - { _index: index2, _type: _doc, _id: en_456} - - { _index: index2, _type: _doc, _id: fr_789} + - { _index: index2, _id: en_123} + - { _index: index2, _id: en_456} + - { _index: index2, _id: fr_789} - is_true: docs.0.found - match: { docs.0._index: index2 } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml index 
f17b59e5806fe..4df12b31a0bed 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml @@ -104,7 +104,6 @@ - match: {version_conflicts: 1} - match: {batches: 1} - match: {failures.0.index: test} - - match: {failures.0.type: _doc} - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} @@ -116,9 +115,6 @@ --- "Response for version conflict (seq no powered)": - - skip: - version: " - 6.6.99" - reason: reindex moved to rely on sequence numbers for concurrency control - do: indices.create: index: test @@ -147,7 +143,6 @@ - match: {version_conflicts: 1} - match: {batches: 1} - match: {failures.0.index: test} - - match: {failures.0.type: _doc} - match: {failures.0.id: "1"} - match: {failures.0.status: 409} - match: {failures.0.cause.type: version_conflict_engine_exception} @@ -167,7 +162,6 @@ - do: index: index: test - type: _doc id: 1 body: { "text": "test" } - do: @@ -176,7 +170,6 @@ - do: index: index: test - type: _doc id: 1 body: { "text": "test2" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml index c015b1a21c398..7b00fb59b02b2 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml @@ -150,7 +150,7 @@ indices.refresh: {} - do: - catch: /\[test\]\[_doc\]\[1\] didn't store _source/ + catch: /\[test\]\[1\] didn't store _source/ update_by_query: index: test diff --git a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java 
b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java index 041550b70a60b..837a30555e127 100644 --- a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java @@ -95,7 +95,7 @@ public class URLRepository extends BlobStoreRepository { private final List supportedProtocols; - private final URIPattern[] urlWhiteList; + private final URIPattern[] urlAllowList; private final Environment environment; @@ -120,7 +120,7 @@ public URLRepository( } this.environment = environment; supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(environment.settings()); - urlWhiteList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[] {}); + urlAllowList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[] {}); basePath = BlobPath.cleanPath(); url = URL_SETTING.exists(metadata.settings()) ? URL_SETTING.get(metadata.settings()) @@ -161,7 +161,7 @@ private URL checkURL(URL url) { for (String supportedProtocol : supportedProtocols) { if (supportedProtocol.equals(protocol)) { try { - if (URIPattern.match(urlWhiteList, url.toURI())) { + if (URIPattern.match(urlAllowList, url.toURI())) { // URL matches white list - no additional processing is needed return url; } diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 2e62fdd697ec9..f0029837c7d03 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -149,10 +149,7 @@ thirdPartyAudit { // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', - 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', - 'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', - 
'org.bouncycastle.asn1.x500.X500Name', // from io.netty.handler.ssl.JettyNpnSslEngine (netty) 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java index e95a730c2b755..08df9259d475f 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -94,7 +94,7 @@ public void testLimitsInFlightRequests() throws Exception { List> requests = new ArrayList<>(); for (int i = 0; i < 150; i++) { - requests.add(Tuple.tuple("/index/type/_bulk", bulkRequest)); + requests.add(Tuple.tuple("/index/_bulk", bulkRequest)); } HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java index a8fc705363bef..1593488701e26 100644 --- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -86,42 +86,6 @@ public void testIndexExists() throws IOException { headTestCase("/test", singletonMap("pretty", "true"), greaterThan(0)); } - public void testTypeExists() throws IOException { - createTestDoc(); - headTestCase( - "/test/_mapping/_doc", - emptyMap(), - OK.getStatus(), - greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated." 
- ); - headTestCase( - "/test/_mapping/_doc", - singletonMap("pretty", "true"), - OK.getStatus(), - greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated." - ); - } - - public void testTypeDoesNotExist() throws IOException { - createTestDoc(); - headTestCase( - "/test/_mapping/does-not-exist", - emptyMap(), - NOT_FOUND.getStatus(), - greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated." - ); - headTestCase( - "/text/_mapping/test,does-not-exist", - emptyMap(), - NOT_FOUND.getStatus(), - greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated." - ); - } - public void testAliasExists() throws IOException { createTestDoc(); try (XContentBuilder builder = jsonBuilder()) { diff --git a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java index b967298b30a41..46b9c45b9cf82 100644 --- a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java +++ b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java @@ -93,13 +93,12 @@ public void testBasicUsage() throws Exception { // both values should collate to same value indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should 
return both results since they collate to the same value SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) @@ -136,14 +135,14 @@ public void testMultipleValues() throws Exception { // everything should be indexed fine, no exceptions indexRandom( true, - client().prepareIndex(index, type, "1") + client().prepareIndex(index) + .setId("1") .setSource("{\"id\":\"1\", \"collate\":[\"" + equivalent[0] + "\", \"" + equivalent[1] + "\"]}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) ); // using sort mode = max, values B and C will be used for the sort SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", "a")) @@ -159,7 +158,6 @@ public void testMultipleValues() throws Exception { // same thing, using different sort mode that will use a for both docs request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", "a")) @@ -201,13 +199,12 @@ public void testNormalization() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + 
equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) @@ -248,12 +245,11 @@ public void testSecondaryStrength() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) @@ -295,12 +291,11 @@ public void testIgnorePunctuation() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", randomBoolean() ? 
equivalent[0] : equivalent[1])) @@ -342,13 +337,12 @@ public void testIgnoreWhitespace() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"foo bar\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"foobar\"}", XContentType.JSON), - client().prepareIndex(index, type, "3").setSource("{\"id\":\"3\",\"collate\":\"foo-bar\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"foo bar\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"foobar\"}", XContentType.JSON), + client().prepareIndex(index).setId("3").setSource("{\"id\":\"3\",\"collate\":\"foo-bar\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.ASC) // secondary sort // should kick in on @@ -386,12 +380,11 @@ public void testNumerics() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"foobar-10\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"foobar-9\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"collate\":\"foobar-10\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"collate\":\"foobar-9\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC)); SearchResponse response = client().search(request).actionGet(); @@ -427,14 +420,13 @@ public void testIgnoreAccentsButNotCase() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"résumé\"}", XContentType.JSON), - 
client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"Resume\"}", XContentType.JSON), - client().prepareIndex(index, type, "3").setSource("{\"id\":\"3\",\"collate\":\"resume\"}", XContentType.JSON), - client().prepareIndex(index, type, "4").setSource("{\"id\":\"4\",\"collate\":\"Résumé\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"résumé\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"Resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("3").setSource("{\"id\":\"3\",\"collate\":\"resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("4").setSource("{\"id\":\"4\",\"collate\":\"Résumé\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.DESC)); SearchResponse response = client().search(request).actionGet(); @@ -467,12 +459,11 @@ public void testUpperCaseFirst() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"resume\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"collate\":\"resume\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"collate\":\"Resume\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC)); SearchResponse response = client().search(request).actionGet(); @@ -517,12 +508,11 @@ public void testCustomRules() throws Exception { indexRandom( true, - client().prepareIndex(index, type, "1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), - 
client().prepareIndex(index, type, "2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index).setId("1").setSource("{\"id\":\"1\",\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index).setId("2").setSource("{\"id\":\"2\",\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest().indices(index) - .types(type) .source( new SearchSourceBuilder().fetchSource(false) .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index e7ba996587e22..9e4bb9c647859 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ b/plugins/analysis-ukrainian/build.gradle @@ -36,9 +36,9 @@ opensearchplugin { dependencies { api "org.apache.lucene:lucene-analyzers-morfologik:${versions.lucene}" - api "org.carrot2:morfologik-stemming:2.1.1" - api "org.carrot2:morfologik-fsa:2.1.1" - api "ua.net.nlp:morfologik-ukrainian-search:3.7.5" + api "org.carrot2:morfologik-stemming:2.1.8" + api "org.carrot2:morfologik-fsa:2.1.8" + api "ua.net.nlp:morfologik-ukrainian-search:4.9.1" } restResources { diff --git a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 deleted file mode 100644 index 07d523ec0c82b..0000000000000 --- a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87866deba6aa5d19956fbe3406d8ddb5f19f5352 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 new file mode 100644 index 0000000000000..0b81b8051a3ba --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/morfologik-fsa-2.1.8.jar.sha1 @@ -0,0 +1 @@ +68e23e2c57fe5699d511b3a7a2f202f90020e214 \ No newline at end of 
file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1 deleted file mode 100644 index 22af41d2b6b1b..0000000000000 --- a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5c169bab2e7dd04f5cb03d179a73a4339cc1d0a2 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1 new file mode 100644 index 0000000000000..6dfcc82f05b39 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/morfologik-stemming-2.1.8.jar.sha1 @@ -0,0 +1 @@ +409fa92db4cfb0f90a33d303732a4882cee3d1e7 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1 deleted file mode 100644 index 446e7a91161a8..0000000000000 --- a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2b8c8fbd740164d220ca7d18605b8b2092e163e9 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1 new file mode 100644 index 0000000000000..31035a1593bbc --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1 @@ -0,0 +1 @@ +98541e2d3e95d69244829c2855b10686b344c3b3 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 8aac387cea6bf..968f4efb3fa1e 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -54,7 +54,7 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" api "commons-lang:commons-lang:2.6" api 
"commons-io:commons-io:2.7" - api 'javax.mail:mail:1.4.5' + api 'javax.mail:mail:1.4.7' api 'javax.inject:javax.inject:1' api "com.sun.jersey:jersey-client:${versions.jersey}" api "com.sun.jersey:jersey-core:${versions.jersey}" @@ -63,8 +63,8 @@ dependencies { api 'com.sun.xml.bind:jaxb-impl:2.2.3-1' // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, - // and whitelist this hack in JarHell - api 'javax.xml.bind:jaxb-api:2.2.2' + // and allowlist this hack in JarHell + api 'javax.xml.bind:jaxb-api:2.3.1' } restResources { diff --git a/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 deleted file mode 100644 index a37e187238933..0000000000000 --- a/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aeb3021ca93dde265796d82015beecdcff95bf09 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1 new file mode 100644 index 0000000000000..f4434214e1eec --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/jaxb-api-2.3.1.jar.sha1 @@ -0,0 +1 @@ +8531ad5ac454cc2deb9d4d32c40c4d7451939b5d \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 b/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 deleted file mode 100644 index b79503e0c69d9..0000000000000 --- a/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -85319c87280f30e1afc54c355f91f44741beac49 diff --git a/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1 b/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1 new file mode 100644 index 0000000000000..0b9ba0ce9f186 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/mail-1.4.7.jar.sha1 @@ -0,0 +1 @@ +9add058589d5d85adeb625859bf2c5eeaaedf12d \ No newline at end 
of file diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a6d4134d15a9b..7998e0861c7b1 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -134,17 +134,8 @@ tasks.named("thirdPartyAudit").configure { ignoreMissingClasses( // classes are missing 'javax.jms.Message', - 'com.amazonaws.jmespath.JmesPathEvaluationVisitor', 'com.amazonaws.jmespath.JmesPathExpression', - 'com.amazonaws.jmespath.JmesPathField', - 'com.amazonaws.jmespath.JmesPathFlatten', - 'com.amazonaws.jmespath.JmesPathIdentity', - 'com.amazonaws.jmespath.JmesPathLengthFunction', - 'com.amazonaws.jmespath.JmesPathLiteral', - 'com.amazonaws.jmespath.JmesPathProjection', - 'com.amazonaws.jmespath.JmesPathSubExpression', 'com.amazonaws.jmespath.ObjectMapperSingleton', - 'com.amazonaws.jmespath.OpGreaterThan', 'software.amazon.ion.IonReader', 'software.amazon.ion.IonSystem', 'software.amazon.ion.IonType', diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java index eac46356d9127..2686c376213f3 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java @@ -33,42 +33,15 @@ package org.opensearch.discovery.ec2; import com.amazonaws.services.ec2.AmazonEC2; - -import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped {@link AmazonEC2} using reference * counting. 
*/ -public class AmazonEc2Reference extends AbstractRefCounted implements Releasable { - - private final AmazonEC2 client; +public class AmazonEc2Reference extends RefCountedReleasable { AmazonEc2Reference(AmazonEC2 client) { - super("AWS_EC2_CLIENT"); - this.client = client; + super("AWS_EC2_CLIENT", client, client::shutdown); } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); - } - - /** - * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. - */ - public AmazonEC2 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java index 4b36a60bb278f..f26ecfab501f8 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java @@ -129,7 +129,7 @@ protected List fetchDynamicNodes() { // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. 
any security groups) - descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest())); + descInstances = SocketAccess.doPrivileged(() -> clientReference.get().describeInstances(buildDescribeInstancesRequest())); } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java index be6261583bdd1..cb19c0d4255ac 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -103,7 +103,7 @@ public void testNodeAttributesErrorLenient() throws Exception { public void testDefaultEndpoint() throws IOException { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("")); } } @@ -111,7 +111,7 @@ public void testDefaultEndpoint() throws IOException { public void testSpecificEndpoint() throws IOException { final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build(); try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("ec2.endpoint")); } } @@ -150,7 +150,7 @@ public void testClientSettingsReInit() throws IOException { try (Ec2DiscoveryPluginMock plugin = new 
Ec2DiscoveryPluginMock(settings1)) { try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_1")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_1")); if (mockSecure1HasSessionToken) { @@ -159,32 +159,32 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } // reload secure settings2 plugin.reload(settings2); // client is not released, it is still using the old settings { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) 
clientReference.get()).credentials.getCredentials(); if (mockSecure1HasSessionToken) { assertThat(credentials, instanceOf(BasicSessionCredentials.class)); assertThat(((BasicSessionCredentials) credentials).getSessionToken(), is("ec2_session_token_1")); } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } } try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_2")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_2")); if (mockSecure2HasSessionToken) { @@ -193,11 +193,11 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) 
clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(882)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_2")); } } } diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index 61888efbcf1df..70052c209ab61 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -32,7 +32,7 @@ apply plugin: 'opensearch.yaml-rest-test' opensearchplugin { name 'painless-whitelist' - description 'An example whitelisting additional classes and methods in painless' + description 'An example allowlisting additional classes and methods in painless' classname 'org.opensearch.example.painlesswhitelist.MyWhitelistPlugin' extendedPlugins = ['lang-painless'] licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt') diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java index 
471c28ea445d0..74adcf5e4f57a 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistExtension.java @@ -46,19 +46,19 @@ import java.util.List; import java.util.Map; -/** An extension of painless which adds a whitelist. */ +/** An extension of painless which adds an allowlist. */ public class ExampleWhitelistExtension implements PainlessExtension { @Override public Map, List> getContextWhitelists() { Map parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS); parsers.put(ExamplePainlessAnnotation.NAME, ExampleWhitelistAnnotationParser.INSTANCE); - Whitelist classWhitelist = WhitelistLoader.loadFromResourceFiles(ExampleWhitelistExtension.class, parsers, "example_whitelist.txt"); + Whitelist classAllowlist = WhitelistLoader.loadFromResourceFiles(ExampleWhitelistExtension.class, parsers, "example_whitelist.txt"); - ExampleWhitelistedInstance ewi = new ExampleWhitelistedInstance(1); + ExampleWhitelistedInstance eai = new ExampleWhitelistedInstance(1); WhitelistInstanceBinding addValue = new WhitelistInstanceBinding( "example addValue", - ewi, + eai, "addValue", "int", Collections.singletonList("int"), @@ -66,20 +66,20 @@ public Map, List> getContextWhitelists() { ); WhitelistInstanceBinding getValue = new WhitelistInstanceBinding( "example getValue", - ewi, + eai, "getValue", "int", Collections.emptyList(), Collections.emptyList() ); - Whitelist instanceWhitelist = new Whitelist( - ewi.getClass().getClassLoader(), + Whitelist instanceAllowlist = new Whitelist( + eai.getClass().getClassLoader(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList(addValue, getValue) ); - return Collections.singletonMap(FieldScript.CONTEXT, Arrays.asList(classWhitelist, instanceWhitelist)); + return Collections.singletonMap(FieldScript.CONTEXT, 
Arrays.asList(classAllowlist, instanceAllowlist)); } } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java index 1798375686d12..5832a2ee59a85 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/ExampleWhitelistedClass.java @@ -33,9 +33,9 @@ package org.opensearch.example.painlesswhitelist; /** - * An example of a class to be whitelisted for use by painless scripts + * An example of a class to be allowlisted for use by painless scripts * - * Each of the members and methods below are whitelisted for use in search scripts. + * Each of the members and methods below are allowlisted for use in search scripts. * See example_whitelist.txt. 
*/ public class ExampleWhitelistedClass { @@ -68,7 +68,7 @@ public static int toInt(String x) { return Integer.parseInt(x); } - // example method to attach annotations in whitelist + // example method to attach annotations in allowlist public void annotate() { // some logic here } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java index 38a95545c46cb..ab6ba53e4039f 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/opensearch/example/painlesswhitelist/MyWhitelistPlugin.java @@ -35,5 +35,5 @@ import org.opensearch.plugins.Plugin; public class MyWhitelistPlugin extends Plugin { - // we don't actually need anything here, since whitelists are extended through SPI + // we don't actually need anything here, since allowlists are extended through SPI } diff --git a/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt b/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt index 5c6c605c7c28a..8f2ccaf05f2f3 100644 --- a/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt +++ b/plugins/examples/painless-whitelist/src/main/resources/org/opensearch/example/painlesswhitelist/example_whitelist.txt @@ -17,7 +17,7 @@ # under the License. 
# -# This file contains a whitelist for an example class which may be access from painless +# This file contains an allowlist for an example class which may be access from painless class org.opensearch.example.painlesswhitelist.ExampleWhitelistedClass { # constructor diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml index 1b8870582375d..cc3762eb42d68 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/10_basic.yml @@ -1,4 +1,4 @@ -# Integration tests for the painless whitelist example plugin +# Integration tests for the painless allowlist example plugin # "Plugin loaded": - skip: diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml index 51a440142fd5e..92289af179278 100644 --- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml @@ -1,4 +1,4 @@ -# Example test using whitelisted members and methods +# Example test using allowlisted members and methods "Whitelisted custom class": - do: diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml index c6d8048b97961..447e1c2a8271f 100644 --- 
a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml +++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml @@ -1,4 +1,4 @@ -# Example test using whitelisted statically imported method +# Example test using allowlisted statically imported method "custom static imported method": - do: diff --git a/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java b/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java index af94bcfa79367..3e3119094cb69 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java @@ -42,6 +42,7 @@ import org.opensearch.index.mapper.FieldTypeTestCase; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.Mapper; +import org.opensearch.index.query.IntervalMode; import java.io.IOException; import java.util.Collections; @@ -51,7 +52,7 @@ public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase { public void testIntervals() throws IOException { MappedFieldType ft = new AnnotatedTextFieldMapper.AnnotatedTextFieldType("field", Collections.emptyMap()); NamedAnalyzer a = new NamedAnalyzer("name", AnalyzerScope.INDEX, new StandardAnalyzer()); - IntervalsSource source = ft.intervals("Donald Trump", 0, true, a, false); + IntervalsSource source = ft.intervals("Donald Trump", 0, IntervalMode.ORDERED, a, false); assertEquals(Intervals.phrase(Intervals.term("donald"), Intervals.term("trump")), source); } diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml 
b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml index be3b32e6338dc..b4acccf36879d 100644 --- a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml +++ b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml @@ -3,10 +3,6 @@ --- "annotated highlighter on annotated text": - - skip: - version: " - 6.4.99" - reason: Annotated text type introduced in 6.5.0 - - do: indices.create: index: annotated @@ -80,10 +76,6 @@ --- "issue 39395 thread safety issue -requires multiple calls to reveal": - - skip: - version: " - 6.4.99" - reason: Annotated text type introduced in 6.5.0 - - do: indices.create: index: annotated diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java index 4811c7d12759c..2cf05da26c193 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java @@ -36,6 +36,7 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.plugin.mapper.MapperSizePlugin; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchIntegTestCase; @@ -62,13 +63,13 @@ protected Collection> nodePlugins() { // issue 5053 public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() throws Exception { String index = "foo"; - String type = "mytype"; + String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = 
jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); // update some field in the mapping XContentBuilder updateMappingBuilder = jsonBuilder().startObject() @@ -78,27 +79,22 @@ public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() thr .endObject() .endObject() .endObject(); - AcknowledgedResponse putMappingResponse = client().admin() - .indices() - .preparePutMapping(index) - .setType(type) - .setSource(updateMappingBuilder) - .get(); + AcknowledgedResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); } public void testThatSizeCanBeSwitchedOnAndOff() throws Exception { String index = "foo"; - String type = "mytype"; + String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again - assertSizeMappingEnabled(index, type, true); + assertSizeMappingEnabled(index, true); // update some field in the mapping XContentBuilder updateMappingBuilder = jsonBuilder().startObject() @@ -106,27 +102,21 @@ public void testThatSizeCanBeSwitchedOnAndOff() throws Exception { .field("enabled", false) .endObject() .endObject(); - AcknowledgedResponse putMappingResponse = client().admin() - .indices() - .preparePutMapping(index) - .setType(type) - .setSource(updateMappingBuilder) - .get(); + AcknowledgedResponse putMappingResponse = 
client().admin().indices().preparePutMapping(index).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping - assertSizeMappingEnabled(index, type, false); + assertSizeMappingEnabled(index, false); } - private void assertSizeMappingEnabled(String index, String type, boolean enabled) throws IOException { + private void assertSizeMappingEnabled(String index, boolean enabled) throws IOException { String errMsg = String.format( Locale.ROOT, - "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s/%s", - index, - type + "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s", + index ); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); - Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); + Map mappingSource = getMappingsResponse.getMappings().get(index).getSourceAsMap(); assertThat(errMsg, mappingSource, hasKey("_size")); String sizeAsString = mappingSource.get("_size").toString(); assertThat(sizeAsString, is(notNullValue())); @@ -134,10 +124,10 @@ private void assertSizeMappingEnabled(String index, String type, boolean enabled } public void testBasic() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "_size", "enabled=true")); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, "_size", "enabled=true")); final String source = "{\"f\":10}"; - indexRandom(true, client().prepareIndex("test", "type", "1").setSource(source, XContentType.JSON)); - GetResponse getResponse = client().prepareGet("test", "type", "1").setStoredFields("_size").get(); + indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); + GetResponse getResponse = 
client().prepareGet("test", "1").setStoredFields("_size").get(); assertNotNull(getResponse.getField("_size")); assertEquals(source.length(), (int) getResponse.getField("_size").getValue()); } diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java index e39439c1a3b4f..4e4648a87fbfc 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java @@ -63,10 +63,10 @@ protected Collection> getPlugins() { public void testSizeEnabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); boolean stored = false; boolean points = false; @@ -80,27 +80,27 @@ public void testSizeEnabled() throws Exception { public void testSizeDisabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); 
+ ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testSizeNotSet() throws Exception { - IndexService service = createIndex("test", Settings.EMPTY, "type"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + IndexService service = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testThatDisablingWorksWhenMerging() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); String disabledMapping = Strings.toString( diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 81ef4e98923a3..60fb99f459454 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -44,9 +44,9 @@ opensearchplugin { } dependencies { - api 'com.azure:azure-core:1.22.0' - api 'com.azure:azure-storage-common:12.14.0' - api 'com.azure:azure-core-http-netty:1.11.2' + api 'com.azure:azure-core:1.26.0' + api 'com.azure:azure-storage-common:12.15.0' + api 'com.azure:azure-core-http-netty:1.11.8' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" api 
"io.netty:netty-codec-http2:${versions.netty}" @@ -54,12 +54,12 @@ dependencies { api "io.netty:netty-resolver-dns:${versions.netty}" api "io.netty:netty-transport-native-unix-common:${versions.netty}" implementation project(':modules:transport-netty4') - api 'com.azure:azure-storage-blob:12.14.1' + api 'com.azure:azure-storage-blob:12.14.4' api 'org.reactivestreams:reactive-streams:1.0.3' - api 'io.projectreactor:reactor-core:3.4.11' - api 'io.projectreactor.netty:reactor-netty:1.0.13' - api 'io.projectreactor.netty:reactor-netty-core:1.0.13' - api 'io.projectreactor.netty:reactor-netty-http:1.0.13' + api 'io.projectreactor:reactor-core:3.4.15' + api 'io.projectreactor.netty:reactor-netty:1.0.16' + api 'io.projectreactor.netty:reactor-netty-core:1.0.16' + api 'io.projectreactor.netty:reactor-netty-http:1.0.16' api "org.slf4j:slf4j-api:${versions.slf4j}" api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" @@ -67,9 +67,9 @@ dependencies { api "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${versions.jackson}" api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" api 'org.codehaus.woodstox:stax2-api:4.2.1' - implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' - runtimeOnly 'com.google.guava:guava:30.1.1-jre' - api 'org.apache.commons:commons-lang3:3.4' + implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8' + runtimeOnly 'com.google.guava:guava:31.1-jre' + api 'org.apache.commons:commons-lang3:3.12.0' testImplementation project(':test:fixtures:azure-fixture') } @@ -119,25 +119,16 @@ thirdPartyAudit { 'io.micrometer.core.instrument.search.Search', 'io.netty.channel.epoll.Epoll', 'io.netty.channel.epoll.EpollDatagramChannel', - 'io.netty.channel.epoll.EpollDomainDatagramChannel', - 'io.netty.channel.epoll.EpollDomainSocketChannel', - 'io.netty.channel.epoll.EpollEventLoopGroup', - 
'io.netty.channel.epoll.EpollServerDomainSocketChannel', 'io.netty.channel.epoll.EpollServerSocketChannel', 'io.netty.channel.epoll.EpollSocketChannel', 'io.netty.channel.kqueue.KQueue', 'io.netty.channel.kqueue.KQueueDatagramChannel', - 'io.netty.channel.kqueue.KQueueDomainDatagramChannel', - 'io.netty.channel.kqueue.KQueueDomainSocketChannel', - 'io.netty.channel.kqueue.KQueueEventLoopGroup', - 'io.netty.channel.kqueue.KQueueServerDomainSocketChannel', 'io.netty.channel.kqueue.KQueueServerSocketChannel', 'io.netty.channel.kqueue.KQueueSocketChannel', 'io.netty.handler.codec.haproxy.HAProxyMessage', 'io.netty.handler.codec.haproxy.HAProxyMessageDecoder', 'io.netty.incubator.channel.uring.IOUring', 'io.netty.incubator.channel.uring.IOUringDatagramChannel', - 'io.netty.incubator.channel.uring.IOUringEventLoopGroup', 'io.netty.incubator.channel.uring.IOUringServerSocketChannel', 'io.netty.incubator.channel.uring.IOUringSocketChannel', 'javax.activation.DataHandler', @@ -167,7 +158,6 @@ thirdPartyAudit { 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', - 'kotlin.TypeCastException', 'kotlin.collections.ArraysKt', 'kotlin.jvm.JvmClassMappingKt', 'kotlin.jvm.functions.Function0', diff --git a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 deleted file mode 100644 index f57b83e5d9715..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -194b21b804c20c85f7d2a6199280075f6747e188 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..693c6a721959c --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 @@ -0,0 +1 @@ 
+461b89dcf8948a0c4a97d4f1d876f778d0cac7aa \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1 deleted file mode 100644 index 3d3c0a59a77ba..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7d84ec31d73a7b51bc72044789768b25fb2b14f4 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 new file mode 100644 index 0000000000000..df7d7ae4ce285 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 @@ -0,0 +1 @@ +0ea66d4531fb41cb3b5ab55e2e7b7f301e7f8503 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 deleted file mode 100644 index d9c6f462089e3..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -384763aef32d779ee22ef3faa03049fee7e0f6de \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 new file mode 100644 index 0000000000000..5333f8fa90ada --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 @@ -0,0 +1 @@ +2b92020693d09e4980b96d278e8038a1087afea0 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 deleted file mode 100644 index c2fbd451e785e..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-common-12.14.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ed58d3438a7fa3a2a5e9f60c0111795101dc8bf6 \ 
No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 new file mode 100644 index 0000000000000..1f3adfc161c7f --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 @@ -0,0 +1 @@ +4d63ce8bbd20379c5e5262b1204ceac7b31a7743 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 new file mode 100644 index 0000000000000..9273d8c01aaba --- /dev/null +++ b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 @@ -0,0 +1 @@ +c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 deleted file mode 100644 index fdd7040377b8f..0000000000000 --- a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fe28b9518e58819180a43a850fbc0dd24b7c050 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 deleted file mode 100644 index 39e641fc7834f..0000000000000 --- a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87e0fd1df874ea3cbe577702fe6f17068b790fd8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 new file mode 100644 index 0000000000000..e57390ebe1299 --- /dev/null +++ b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 @@ -0,0 +1 @@ +60458f877d055d0c9114d9e1a2efb737b4bc282c \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1 
deleted file mode 100644 index fc0911be8fedf..0000000000000 --- a/plugins/repository-azure/licenses/reactor-core-3.4.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0e305f6aa6e6da26aa42726f8cfd69b6ab53d7c0 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 new file mode 100644 index 0000000000000..a89de48b20b51 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 @@ -0,0 +1 @@ +28ccf513fe64709c8ded30ea3f387fc718db9626 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 deleted file mode 100644 index be6cfc229b9b2..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf216a9ba6b50210664761add9db744c9c3f51d8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..582380e449a1d --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 @@ -0,0 +1 @@ +d90829f6127966b0c35c4a3e8e23ca9ed29cd8a5 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 deleted file mode 100644 index 8f81861f48dde..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a67949c5946dd66c7ab0a3b059213c23345c32b1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..0d1a0cb20c80f --- /dev/null +++ 
b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 @@ -0,0 +1 @@ +8f842a912677f2bc614ff60fb9e786d4fa429c34 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 deleted file mode 100644 index e6b4cb0b9a4e8..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -de7a38101098db9438c18fdd09acc5b79a2ec02a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..d737315b06b62 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 @@ -0,0 +1 @@ +93edb9a1dc774d843551a616e0f316e11ffa81ed \ No newline at end of file diff --git a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 deleted file mode 100644 index f2ad1c80882d3..0000000000000 --- a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -989bb31963ed1758b95c7c4381a91592a9a8df61 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 new file mode 100644 index 0000000000000..ae65cdebf26de --- /dev/null +++ b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 @@ -0,0 +1 @@ +670748292899c53b1963730d9eb7f8ab71314e90 \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java index 6345103c6ecc6..b540dd83c95a2 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java 
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java @@ -35,7 +35,6 @@ import com.azure.core.http.HttpMethod; import com.azure.core.http.HttpRequest; import com.azure.core.http.HttpResponse; -import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.storage.blob.BlobClient; @@ -52,7 +51,6 @@ import com.azure.storage.blob.options.BlobParallelUploadOptions; import com.azure.storage.common.implementation.Constants; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.util.Throwables; @@ -84,7 +82,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; @@ -220,71 +217,50 @@ public DeleteResult deleteBlobDirectory(String path, Executor executor) throws U final ListBlobsOptions listBlobsOptions = new ListBlobsOptions().setPrefix(path); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed. 
- final Optional> pageOpt = blobContainer.listBlobs(listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping prefixes as those are not deletable and should not be there - assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; - - outstanding.incrementAndGet(); - executor.execute(new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - final long len = blobItem.getProperties().getContentLength(); - - final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); - logger.trace( - () -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blobItem.getName()) - ); - final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); - logger.trace( - () -> new ParameterizedMessage( - "container [{}]: blob [{}] deleted status [{}].", - container, - blobItem.getName(), - response.getStatusCode() - ) - ); - - blobsDeleted.incrementAndGet(); - if (len >= 0) { - bytesDeleted.addAndGet(len); - } + for (final BlobItem blobItem : blobContainer.listBlobs(listBlobsOptions, timeout())) { + // Skipping prefixes as those are not deletable and should not be there + assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; + + outstanding.incrementAndGet(); + executor.execute(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + final long len = blobItem.getProperties().getContentLength(); + + final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); + logger.trace( + () -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blobItem.getName()) + ); + final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); + logger.trace( + () -> new ParameterizedMessage( + "container [{}]: blob [{}] deleted status [{}].", + container, + blobItem.getName(), + response.getStatusCode() + ) + ); + + blobsDeleted.incrementAndGet(); + if (len >= 0) { + bytesDeleted.addAndGet(len); } + } - @Override - public void onFailure(Exception e) { - exceptions.add(e); - } + @Override + public void onFailure(Exception e) { + exceptions.add(e); + } - @Override - public void onAfter() { - if (outstanding.decrementAndGet() == 0) { - result.onResponse(null); - } + @Override + public void onAfter() { + if (outstanding.decrementAndGet() == 0) { + result.onResponse(null); } - }); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } + }); + } }); - if (outstanding.decrementAndGet() == 0) { result.onResponse(null); } @@ -325,39 +301,19 @@ public Map listBlobsByPrefix(String keyPath, String prefix .setPrefix(keyPath + (prefix == null ? 
"" : prefix)); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the prefixes, only look for the blobs + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + continue; } - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the prefixes, only look for the blobs - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - continue; - } + final String name = getBlobName(blobItem.getName(), container, keyPath); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - final String name = getBlobName(blobItem.getName(), container, keyPath); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - - final BlobItemProperties properties = blobItem.getProperties(); - logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); - blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getContentLength())); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + final BlobItemProperties properties = blobItem.getProperties(); + logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); + blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getContentLength())); + } 
}); return MapBuilder.newMapBuilder(blobsBuilder).immutableMap(); @@ -373,36 +329,17 @@ public Map children(BlobPath path) throws URISyntaxExcept .setPrefix(keyPath); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the blobs, only look for prefixes - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ - // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. - // Lastly, we add the length of keyPath to the offset to strip this container's path. - final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - blobsBuilder.add(name); - } + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the blobs, only look for prefixes + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ + // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. + // Lastly, we add the length of keyPath to the offset to strip this container's path. 
+ final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); + blobsBuilder.add(name); } - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } }); return Collections.unmodifiableMap( @@ -413,8 +350,8 @@ public Map children(BlobPath path) throws URISyntaxExcept public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws URISyntaxException, BlobStorageException, IOException { - assert inputStream - .markSupported() : "Should not be used with non-mark supporting streams as their retry handling in the SDK is broken"; + assert inputStream.markSupported() + : "Should not be used with non-mark supporting streams as their retry handling in the SDK is broken"; logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {})", blobName, blobSize)); final Tuple> client = client(); final BlobContainerClient blobContainer = client.v1().getBlobContainerClient(container); diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java index aa41941436171..82ab5243a09aa 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepositoryPlugin.java @@ -94,13 +94,15 @@ public List> getSettings() { AzureStorageSettings.ENDPOINT_SUFFIX_SETTING, AzureStorageSettings.TIMEOUT_SETTING, AzureStorageSettings.MAX_RETRIES_SETTING, - AzureStorageSettings.PROXY_TYPE_SETTING, - AzureStorageSettings.PROXY_HOST_SETTING, - AzureStorageSettings.PROXY_PORT_SETTING, AzureStorageSettings.CONNECT_TIMEOUT_SETTING, AzureStorageSettings.WRITE_TIMEOUT_SETTING, 
AzureStorageSettings.READ_TIMEOUT_SETTING, - AzureStorageSettings.RESPONSE_TIMEOUT_SETTING + AzureStorageSettings.RESPONSE_TIMEOUT_SETTING, + AzureStorageSettings.PROXY_TYPE_SETTING, + AzureStorageSettings.PROXY_HOST_SETTING, + AzureStorageSettings.PROXY_PORT_SETTING, + AzureStorageSettings.PROXY_USERNAME_SETTING, + AzureStorageSettings.PROXY_PASSWORD_SETTING ); } diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java index 6cd3a149c6957..3800be7c2d27d 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java @@ -43,7 +43,6 @@ import com.azure.core.http.HttpRequest; import com.azure.core.http.HttpResponse; import com.azure.core.http.ProxyOptions; -import com.azure.core.http.ProxyOptions.Type; import com.azure.core.http.netty.NettyAsyncHttpClientBuilder; import com.azure.core.http.policy.HttpPipelinePolicy; import com.azure.core.util.Configuration; @@ -66,12 +65,11 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import java.net.InetSocketAddress; -import java.net.Proxy; +import java.net.Authenticator; +import java.net.PasswordAuthentication; import java.net.URISyntaxException; import java.security.InvalidKeyException; import java.time.Duration; -import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -169,15 +167,20 @@ private ClientState buildClient(AzureStorageSettings azureStorageSettings, BiCon final NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(new NioThreadFactory()); final NettyAsyncHttpClientBuilder clientBuilder = new NettyAsyncHttpClientBuilder().eventLoopGroup(eventLoopGroup); - final Proxy proxy = 
azureStorageSettings.getProxy(); - if (proxy != null) { - final Type type = Arrays.stream(Type.values()) - .filter(t -> t.toProxyType().equals(proxy.type())) - .findFirst() - .orElseThrow(() -> new IllegalArgumentException("Unsupported proxy type: " + proxy.type())); - - clientBuilder.proxy(new ProxyOptions(type, (InetSocketAddress) proxy.address())); - } + SocketAccess.doPrivilegedVoidException(() -> { + final ProxySettings proxySettings = azureStorageSettings.getProxySettings(); + if (proxySettings != ProxySettings.NO_PROXY_SETTINGS) { + if (proxySettings.isAuthenticated()) { + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(proxySettings.getUsername(), proxySettings.getPassword().toCharArray()); + } + }); + } + clientBuilder.proxy(new ProxyOptions(proxySettings.getType().toProxyType(), proxySettings.getAddress())); + } + }); final TimeValue connectTimeout = azureStorageSettings.getConnectTimeout(); if (connectTimeout != null) { diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java index 94ec553ab760e..c9a031451bccd 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageSettings.java @@ -44,8 +44,6 @@ import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.Proxy; import java.net.UnknownHostException; import java.util.Collections; import java.util.HashMap; @@ -143,10 +141,10 @@ final class AzureStorageSettings { ); /** The type of the proxy to connect to azure through. 
Can be direct (no proxy, default), http or socks */ - public static final AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting( + public static final AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting( AZURE_CLIENT_PREFIX_KEY, "proxy.type", - (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope), + (key) -> new Setting<>(key, "direct", s -> ProxySettings.ProxyType.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope), () -> ACCOUNT_SETTING, () -> KEY_SETTING ); @@ -162,27 +160,50 @@ final class AzureStorageSettings { ); /** The port of a proxy to connect to azure through. */ - public static final Setting PROXY_PORT_SETTING = Setting.affixKeySetting( + public static final AffixSetting PROXY_PORT_SETTING = Setting.affixKeySetting( AZURE_CLIENT_PREFIX_KEY, "proxy.port", (key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope), + () -> KEY_SETTING, () -> ACCOUNT_SETTING, + () -> PROXY_TYPE_SETTING, + () -> PROXY_HOST_SETTING + ); + + /** The username of a proxy to connect */ + static final AffixSetting PROXY_USERNAME_SETTING = Setting.affixKeySetting( + AZURE_CLIENT_PREFIX_KEY, + "proxy.username", + key -> SecureSetting.secureString(key, null), () -> KEY_SETTING, + () -> ACCOUNT_SETTING, () -> PROXY_TYPE_SETTING, () -> PROXY_HOST_SETTING ); + /** The password of a proxy to connect */ + static final AffixSetting PROXY_PASSWORD_SETTING = Setting.affixKeySetting( + AZURE_CLIENT_PREFIX_KEY, + "proxy.password", + key -> SecureSetting.secureString(key, null), + () -> KEY_SETTING, + () -> ACCOUNT_SETTING, + () -> PROXY_TYPE_SETTING, + () -> PROXY_HOST_SETTING, + () -> PROXY_USERNAME_SETTING + ); + private final String account; private final String connectString; private final String endpointSuffix; private final TimeValue timeout; private final int maxRetries; - private final Proxy proxy; private final LocationMode locationMode; private final TimeValue connectTimeout; private final 
TimeValue writeTimeout; private final TimeValue readTimeout; private final TimeValue responseTimeout; + private final ProxySettings proxySettings; // copy-constructor private AzureStorageSettings( @@ -191,24 +212,24 @@ private AzureStorageSettings( String endpointSuffix, TimeValue timeout, int maxRetries, - Proxy proxy, LocationMode locationMode, TimeValue connectTimeout, TimeValue writeTimeout, TimeValue readTimeout, - TimeValue responseTimeout + TimeValue responseTimeout, + ProxySettings proxySettings ) { this.account = account; this.connectString = connectString; this.endpointSuffix = endpointSuffix; this.timeout = timeout; this.maxRetries = maxRetries; - this.proxy = proxy; this.locationMode = locationMode; this.connectTimeout = connectTimeout; this.writeTimeout = writeTimeout; this.readTimeout = readTimeout; this.responseTimeout = responseTimeout; + this.proxySettings = proxySettings; } private AzureStorageSettings( @@ -218,42 +239,23 @@ private AzureStorageSettings( String endpointSuffix, TimeValue timeout, int maxRetries, - Proxy.Type proxyType, - String proxyHost, - Integer proxyPort, TimeValue connectTimeout, TimeValue writeTimeout, TimeValue readTimeout, - TimeValue responseTimeout + TimeValue responseTimeout, + ProxySettings proxySettings ) { this.account = account; this.connectString = buildConnectString(account, key, sasToken, endpointSuffix); this.endpointSuffix = endpointSuffix; this.timeout = timeout; this.maxRetries = maxRetries; - // Register the proxy if we have any - // Validate proxy settings - if (proxyType.equals(Proxy.Type.DIRECT) && ((proxyPort != 0) || Strings.hasText(proxyHost))) { - throw new SettingsException("Azure Proxy port or host have been set but proxy type is not defined."); - } - if ((proxyType.equals(Proxy.Type.DIRECT) == false) && ((proxyPort == 0) || Strings.isEmpty(proxyHost))) { - throw new SettingsException("Azure Proxy type has been set but proxy host or port is not defined."); - } - - if 
(proxyType.equals(Proxy.Type.DIRECT)) { - proxy = null; - } else { - try { - proxy = new Proxy(proxyType, new InetSocketAddress(InetAddress.getByName(proxyHost), proxyPort)); - } catch (final UnknownHostException e) { - throw new SettingsException("Azure proxy host is unknown.", e); - } - } this.locationMode = LocationMode.PRIMARY_ONLY; this.connectTimeout = connectTimeout; this.writeTimeout = writeTimeout; this.readTimeout = readTimeout; this.responseTimeout = responseTimeout; + this.proxySettings = proxySettings; } public String getEndpointSuffix() { @@ -268,8 +270,8 @@ public int getMaxRetries() { return maxRetries; } - public Proxy getProxy() { - return proxy; + public ProxySettings getProxySettings() { + return proxySettings; } public String getConnectString() { @@ -325,7 +327,7 @@ public String toString() { sb.append(", timeout=").append(timeout); sb.append(", endpointSuffix='").append(endpointSuffix).append('\''); sb.append(", maxRetries=").append(maxRetries); - sb.append(", proxy=").append(proxy); + sb.append(", proxySettings=").append(proxySettings != ProxySettings.NO_PROXY_SETTINGS ? 
"PROXY_SET" : "PROXY_NOT_SET"); sb.append(", locationMode='").append(locationMode).append('\''); sb.append(", connectTimeout='").append(connectTimeout).append('\''); sb.append(", writeTimeout='").append(writeTimeout).append('\''); @@ -371,17 +373,42 @@ private static AzureStorageSettings getClientSettings(Settings settings, String getValue(settings, clientName, ENDPOINT_SUFFIX_SETTING), getValue(settings, clientName, TIMEOUT_SETTING), getValue(settings, clientName, MAX_RETRIES_SETTING), - getValue(settings, clientName, PROXY_TYPE_SETTING), - getValue(settings, clientName, PROXY_HOST_SETTING), - getValue(settings, clientName, PROXY_PORT_SETTING), getValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getValue(settings, clientName, WRITE_TIMEOUT_SETTING), getValue(settings, clientName, READ_TIMEOUT_SETTING), - getValue(settings, clientName, RESPONSE_TIMEOUT_SETTING) + getValue(settings, clientName, RESPONSE_TIMEOUT_SETTING), + validateAndCreateProxySettings(settings, clientName) ); } } + static ProxySettings validateAndCreateProxySettings(final Settings settings, final String clientName) { + final ProxySettings.ProxyType proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING); + final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING); + final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING); + final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING); + // Validate proxy settings + if (proxyType == ProxySettings.ProxyType.DIRECT + && (proxyPort != 0 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) { + throw new SettingsException("Azure proxy port or host or username or password have been set but proxy type is not defined."); + } + if (proxyType != ProxySettings.ProxyType.DIRECT && (proxyPort == 0 || 
Strings.isEmpty(proxyHost))) { + throw new SettingsException("Azure proxy type has been set but proxy host or port is not defined."); + } + + if (proxyType == ProxySettings.ProxyType.DIRECT) { + return ProxySettings.NO_PROXY_SETTINGS; + } + + try { + final InetAddress proxyHostAddress = InetAddress.getByName(proxyHost); + return new ProxySettings(proxyType, proxyHostAddress, proxyPort, proxyUserName.toString(), proxyPassword.toString()); + } catch (final UnknownHostException e) { + throw new SettingsException("Azure proxy host is unknown.", e); + } + } + private static T getConfigValue(Settings settings, String clientName, Setting.AffixSetting clientSetting) { final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); @@ -407,12 +434,12 @@ static Map overrideLocationMode( entry.getValue().endpointSuffix, entry.getValue().timeout, entry.getValue().maxRetries, - entry.getValue().proxy, locationMode, entry.getValue().connectTimeout, entry.getValue().writeTimeout, entry.getValue().readTimeout, - entry.getValue().responseTimeout + entry.getValue().responseTimeout, + entry.getValue().getProxySettings() ) ); } diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java new file mode 100644 index 0000000000000..df8c95e69acf2 --- /dev/null +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/ProxySettings.java @@ -0,0 +1,110 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.repositories.azure; + +import com.azure.core.http.ProxyOptions; +import org.opensearch.common.Strings; +import org.opensearch.common.settings.SettingsException; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.Objects; + +public class ProxySettings { + + public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(ProxyType.DIRECT, null, -1, null, null); + + private final ProxyType type; + + private final InetAddress host; + + private final String username; + + private final String password; + + private final int port; + + public static enum ProxyType { + HTTP(ProxyOptions.Type.HTTP.name()), + + /** + * Please use SOCKS4 instead + */ + @Deprecated + SOCKS(ProxyOptions.Type.SOCKS4.name()), + + SOCKS4(ProxyOptions.Type.SOCKS4.name()), + + SOCKS5(ProxyOptions.Type.SOCKS5.name()), + + DIRECT("DIRECT"); + + private final String name; + + private ProxyType(String name) { + this.name = name; + } + + public ProxyOptions.Type toProxyType() { + if (this == DIRECT) { + // We check it in settings, + // the probability that it could be thrown is small, but how knows + throw new SettingsException("Couldn't convert to Azure proxy type"); + } + return ProxyOptions.Type.valueOf(name()); + } + + } + + public ProxySettings(final ProxyType type, final InetAddress host, final int port, final String username, final String password) { + this.type = type; + this.host = host; + this.port = port; + this.username = username; + this.password = password; + } + + public ProxyType getType() { + return this.type; + } + + public InetSocketAddress getAddress() { + return new InetSocketAddress(host, port); + } + + public String getUsername() { + return this.username; + } + + public String getPassword() { + return this.password; + } + + public boolean isAuthenticated() { + return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false; + } + + @Override + public boolean equals(Object o) { + if 
(this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final ProxySettings that = (ProxySettings) o; + return port == that.port + && type == that.type + && Objects.equals(host, that.host) + && Objects.equals(username, that.username) + && Objects.equals(password, that.password); + } + + @Override + public int hashCode() { + return Objects.hash(type, host, username, password, port); + } + +} diff --git a/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy index f6d0f5fcb08d5..f3bf52ea46505 100644 --- a/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-azure/src/main/plugin-metadata/plugin-security.policy @@ -38,4 +38,7 @@ grant { permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; permission java.lang.RuntimePermission "setContextClassLoader"; + + // azure client set Authenticator for proxy username/password + permission java.net.NetPermission "setDefaultAuthenticator"; }; diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java index 785ebef7307bc..7f5ca73a507ad 100644 --- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java +++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureStorageServiceTests.java @@ -32,6 +32,7 @@ package org.opensearch.repositories.azure; +import org.opensearch.common.Strings; import reactor.core.scheduler.Schedulers; import com.azure.core.http.policy.HttpPipelinePolicy; @@ -50,7 +51,6 @@ import java.io.UncheckedIOException; import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.Proxy; import java.net.URI; import 
java.net.URISyntaxException; import java.net.UnknownHostException; @@ -299,9 +299,9 @@ public void testGetSelectedClientBackoffPolicyNbRetries() { public void testNoProxy() { final Settings settings = Settings.builder().setSecureSettings(buildSecureSettings()).build(); final AzureStorageService mock = storageServiceWithSettingsValidation(settings); - assertThat(mock.storageSettings.get("azure1").getProxy(), nullValue()); - assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue()); - assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); + assertEquals(mock.storageSettings.get("azure1").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS); + assertEquals(mock.storageSettings.get("azure2").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS); + assertEquals(mock.storageSettings.get("azure3").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS); } public void testProxyHttp() throws UnknownHostException { @@ -312,13 +312,13 @@ public void testProxyHttp() throws UnknownHostException { .put("azure.client.azure1.proxy.type", "http") .build(); final AzureStorageService mock = storageServiceWithSettingsValidation(settings); - final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings(); assertThat(azure1Proxy, notNullValue()); - assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); - assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); - assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue()); - assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); + assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.HTTP)); + assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); + assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure2").getProxySettings()); + 
assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure3").getProxySettings()); } public void testMultipleProxies() throws UnknownHostException { @@ -332,52 +332,59 @@ public void testMultipleProxies() throws UnknownHostException { .put("azure.client.azure2.proxy.type", "http") .build(); final AzureStorageService mock = storageServiceWithSettingsValidation(settings); - final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings(); assertThat(azure1Proxy, notNullValue()); - assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); - assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); - final Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy(); + assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.HTTP)); + assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); + final ProxySettings azure2Proxy = mock.storageSettings.get("azure2").getProxySettings(); assertThat(azure2Proxy, notNullValue()); - assertThat(azure2Proxy.type(), is(Proxy.Type.HTTP)); - assertThat(azure2Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081))); - assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); + assertThat(azure2Proxy.getType(), is(ProxySettings.ProxyType.HTTP)); + assertThat(azure2Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081))); + assertTrue(Strings.isNullOrEmpty(azure2Proxy.getUsername())); + assertTrue(Strings.isNullOrEmpty(azure2Proxy.getPassword())); + assertEquals(mock.storageSettings.get("azure3").getProxySettings(), ProxySettings.NO_PROXY_SETTINGS); } public void testProxySocks() throws UnknownHostException { + final MockSecureSettings secureSettings = buildSecureSettings(); + secureSettings.setString("azure.client.azure1.proxy.username", "user"); + 
secureSettings.setString("azure.client.azure1.proxy.password", "pwd"); final Settings settings = Settings.builder() - .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) - .put("azure.client.azure1.proxy.type", "socks") + .put("azure.client.azure1.proxy.type", "socks5") + .setSecureSettings(secureSettings) .build(); final AzureStorageService mock = storageServiceWithSettingsValidation(settings); - final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final ProxySettings azure1Proxy = mock.storageSettings.get("azure1").getProxySettings(); assertThat(azure1Proxy, notNullValue()); - assertThat(azure1Proxy.type(), is(Proxy.Type.SOCKS)); - assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); - assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue()); - assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); + assertThat(azure1Proxy.getType(), is(ProxySettings.ProxyType.SOCKS5)); + assertThat(azure1Proxy.getAddress(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); + assertEquals("user", azure1Proxy.getUsername()); + assertEquals("pwd", azure1Proxy.getPassword()); + assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure2").getProxySettings()); + assertEquals(ProxySettings.NO_PROXY_SETTINGS, mock.storageSettings.get("azure3").getProxySettings()); } public void testProxyNoHost() { final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.port", 8080) - .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) + .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http")) .build(); final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings)); - assertEquals("Azure Proxy type has been set 
but proxy host or port is not defined.", e.getMessage()); + assertEquals("Azure proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoPort() { final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") - .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) + .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http")) .build(); final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings)); - assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage()); + assertEquals("Azure proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoType() { @@ -388,13 +395,13 @@ public void testProxyNoType() { .build(); final SettingsException e = expectThrows(SettingsException.class, () -> storageServiceWithSettingsValidation(settings)); - assertEquals("Azure Proxy port or host have been set but proxy type is not defined.", e.getMessage()); + assertEquals("Azure proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage()); } public void testProxyWrongHost() { final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) - .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) + .put("azure.client.azure1.proxy.type", randomFrom("socks", "socks4", "socks5", "http")) .put("azure.client.azure1.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") .put("azure.client.azure1.proxy.port", 8080) .build(); diff --git a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml index 650d5c4474199..beaa95b732d52 100644 --- 
a/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml +++ b/plugins/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml @@ -45,17 +45,14 @@ setup: body: - index: _index: docs - _type: doc _id: 1 - snapshot: one - index: _index: docs - _type: doc _id: 2 - snapshot: one - index: _index: docs - _type: doc _id: 3 - snapshot: one @@ -93,22 +90,18 @@ setup: body: - index: _index: docs - _type: doc _id: 4 - snapshot: two - index: _index: docs - _type: doc _id: 5 - snapshot: two - index: _index: docs - _type: doc _id: 6 - snapshot: two - index: _index: docs - _type: doc _id: 7 - snapshot: two diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index adb97855887f0..c7e7bc5f40cce 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -165,39 +165,23 @@ thirdPartyAudit { 'org.apache.http.client.RedirectHandler', 'org.apache.http.client.RequestDirector', 'org.apache.http.client.UserTokenHandler', - 'org.apache.http.client.methods.HttpDelete', 'org.apache.http.client.methods.HttpEntityEnclosingRequestBase', - 'org.apache.http.client.methods.HttpGet', - 'org.apache.http.client.methods.HttpHead', - 'org.apache.http.client.methods.HttpOptions', - 'org.apache.http.client.methods.HttpPost', - 'org.apache.http.client.methods.HttpPut', 'org.apache.http.client.methods.HttpRequestBase', - 'org.apache.http.client.methods.HttpTrace', 'org.apache.http.config.SocketConfig', 'org.apache.http.config.SocketConfig$Builder', 'org.apache.http.conn.ClientConnectionManager', 'org.apache.http.conn.ConnectionKeepAliveStrategy', 'org.apache.http.conn.params.ConnManagerParams', - 'org.apache.http.conn.params.ConnPerRouteBean', 'org.apache.http.conn.params.ConnRouteParams', 'org.apache.http.conn.routing.HttpRoutePlanner', 'org.apache.http.conn.scheme.PlainSocketFactory', - 'org.apache.http.conn.scheme.Scheme', 
'org.apache.http.conn.scheme.SchemeRegistry', - 'org.apache.http.conn.ssl.SSLConnectionSocketFactory', 'org.apache.http.conn.ssl.SSLSocketFactory', 'org.apache.http.conn.ssl.X509HostnameVerifier', 'org.apache.http.entity.AbstractHttpEntity', 'org.apache.http.impl.client.DefaultHttpClient', - 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler', 'org.apache.http.impl.client.HttpClientBuilder', 'org.apache.http.impl.conn.PoolingHttpClientConnectionManager', - 'org.apache.http.impl.conn.ProxySelectorRoutePlanner', - 'org.apache.http.impl.conn.SystemDefaultRoutePlanner', - 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager', - 'org.apache.http.message.BasicHttpResponse', - 'org.apache.http.params.BasicHttpParams', 'org.apache.http.params.HttpConnectionParams', 'org.apache.http.params.HttpParams', 'org.apache.http.params.HttpProtocolParams', diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java index d15b00712dea4..e8700570d2801 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -36,17 +36,23 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.SecureSetting; +import org.opensearch.common.settings.SecureString; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.net.InetAddress; +import java.net.Proxy; import java.net.URI; +import java.net.UnknownHostException; import java.util.Collection; import java.util.Collections; import 
java.util.HashMap; +import java.util.Locale; import java.util.Map; import java.util.function.Function; @@ -114,6 +120,54 @@ public class GoogleCloudStorageClientSettings { key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated) ); + /** Proxy type */ + static final Setting.AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.type", + (key) -> new Setting( + key, + Proxy.Type.DIRECT.name(), + s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), + Setting.Property.NodeScope + ) + ); + + /** The host of a proxy to connect */ + static final Setting.AffixSetting PROXY_HOST_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.host", + key -> Setting.simpleString(key, Setting.Property.NodeScope), + () -> PROXY_TYPE_SETTING + ); + + /** The port of a proxy to connect */ + static final Setting.AffixSetting PROXY_PORT_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.port", + key -> Setting.intSetting(key, 0, 0, (1 << 16) - 1, Setting.Property.NodeScope), + () -> PROXY_TYPE_SETTING, + () -> PROXY_HOST_SETTING + ); + + /** The username of a proxy to connect */ + static final Setting.AffixSetting PROXY_USERNAME_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.username", + key -> SecureSetting.secureString(key, null), + () -> PROXY_TYPE_SETTING, + () -> PROXY_HOST_SETTING + ); + + /** The password of a proxy to connect */ + static final Setting.AffixSetting PROXY_PASSWORD_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.password", + key -> SecureSetting.secureString(key, null), + () -> PROXY_TYPE_SETTING, + () -> PROXY_HOST_SETTING, + () -> PROXY_USERNAME_SETTING + ); + /** The credentials used by the client to connect to the Storage endpoint. */ private final ServiceAccountCredentials credential; @@ -135,6 +189,9 @@ public class GoogleCloudStorageClientSettings { /** The token server URI. This leases access tokens in the oauth flow. 
*/ private final URI tokenUri; + /** The GCS SDK Proxy settings. */ + private final ProxySettings proxySettings; + GoogleCloudStorageClientSettings( final ServiceAccountCredentials credential, final String endpoint, @@ -142,7 +199,8 @@ public class GoogleCloudStorageClientSettings { final TimeValue connectTimeout, final TimeValue readTimeout, final String applicationName, - final URI tokenUri + final URI tokenUri, + final ProxySettings proxySettings ) { this.credential = credential; this.endpoint = endpoint; @@ -151,6 +209,7 @@ public class GoogleCloudStorageClientSettings { this.readTimeout = readTimeout; this.applicationName = applicationName; this.tokenUri = tokenUri; + this.proxySettings = proxySettings; } public ServiceAccountCredentials getCredential() { @@ -181,6 +240,10 @@ public URI getTokenUri() { return tokenUri; } + public ProxySettings getProxySettings() { + return proxySettings; + } + public static Map load(final Settings settings) { final Map clients = new HashMap<>(); for (final String clientName : settings.getGroups(PREFIX).keySet()) { @@ -202,10 +265,39 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), getConfigValue(settings, clientName, APPLICATION_NAME_SETTING), - getConfigValue(settings, clientName, TOKEN_URI_SETTING) + getConfigValue(settings, clientName, TOKEN_URI_SETTING), + validateAndCreateProxySettings(settings, clientName) ); } + static ProxySettings validateAndCreateProxySettings(final Settings settings, final String clientName) { + final Proxy.Type proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING); + final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING); + final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING); + final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + 
final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING); + // Validate proxy settings + if (proxyType == Proxy.Type.DIRECT + && (proxyPort != 0 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) { + throw new SettingsException( + "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined." + ); + } + if (proxyType != Proxy.Type.DIRECT && (proxyPort == 0 || Strings.isEmpty(proxyHost))) { + throw new SettingsException("Google Cloud Storage proxy type has been set but proxy host or port is not defined."); + } + if (proxyType == Proxy.Type.DIRECT) { + return ProxySettings.NO_PROXY_SETTINGS; + } + + try { + final InetAddress proxyHostAddress = InetAddress.getByName(proxyHost); + return new ProxySettings(proxyType, proxyHostAddress, proxyPort, proxyUserName.toString(), proxyPassword.toString()); + } catch (final UnknownHostException e) { + throw new SettingsException("Google Cloud Storage proxy host is unknown.", e); + } + } + /** * Loads the service account file corresponding to a given client name. If no * file is defined for the client, a {@code null} credential is returned. 
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java index 7d51a6196e4c8..4908b26649b1b 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -92,7 +92,12 @@ public List> getSettings() { GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, - GoogleCloudStorageClientSettings.TOKEN_URI_SETTING + GoogleCloudStorageClientSettings.TOKEN_URI_SETTING, + GoogleCloudStorageClientSettings.PROXY_TYPE_SETTING, + GoogleCloudStorageClientSettings.PROXY_HOST_SETTING, + GoogleCloudStorageClientSettings.PROXY_PORT_SETTING, + GoogleCloudStorageClientSettings.PROXY_USERNAME_SETTING, + GoogleCloudStorageClientSettings.PROXY_PASSWORD_SETTING ); } diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java index 8208dcfe597ff..f4b501327d52c 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java @@ -50,6 +50,9 @@ import org.opensearch.common.unit.TimeValue; import java.io.IOException; +import java.net.Authenticator; +import java.net.PasswordAuthentication; +import java.net.Proxy; import java.net.URI; import java.util.Map; @@ -142,13 +145,7 @@ synchronized void closeRepositoryClient(String repositoryName) { */ private Storage createClient(GoogleCloudStorageClientSettings clientSettings, GoogleCloudStorageOperationsStats stats) throws 
IOException { - final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> { - final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); - // requires java.lang.RuntimePermission "setFactory" - // Pin the TLS trust certificates. - builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); - return builder.build(); - }); + final HttpTransport httpTransport = createHttpTransport(clientSettings); final GoogleCloudStorageHttpStatsCollector httpStatsCollector = new GoogleCloudStorageHttpStatsCollector(stats); @@ -175,6 +172,28 @@ public HttpRequestInitializer getHttpRequestInitializer(ServiceOptions ser return storageOptions.getService(); } + private HttpTransport createHttpTransport(final GoogleCloudStorageClientSettings clientSettings) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> { + final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); + // requires java.lang.RuntimePermission "setFactory" + // Pin the TLS trust certificates. 
+ builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); + final ProxySettings proxySettings = clientSettings.getProxySettings(); + if (proxySettings != ProxySettings.NO_PROXY_SETTINGS) { + if (proxySettings.isAuthenticated()) { + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(proxySettings.getUsername(), proxySettings.getPassword().toCharArray()); + } + }); + } + builder.setProxy(new Proxy(proxySettings.getType(), proxySettings.getAddress())); + } + return builder.build(); + }); + } + StorageOptions createStorageOptions( final GoogleCloudStorageClientSettings clientSettings, final HttpTransportOptions httpTransportOptions diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java new file mode 100644 index 0000000000000..ddc6446d2c8c5 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/ProxySettings.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.repositories.gcs; + +import org.opensearch.common.Strings; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.util.Objects; + +public class ProxySettings { + + public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(Proxy.Type.DIRECT, null, -1, null, null); + + private final Proxy.Type type; + + private final InetAddress host; + + private final String username; + + private final String password; + + private final int port; + + public ProxySettings(final Proxy.Type type, final InetAddress host, final int port, final String username, final String password) { + this.type = type; + this.host = host; + this.port = port; + this.username = username; + this.password = password; + } + + public Proxy.Type getType() { + return this.type; + } + + public InetSocketAddress getAddress() { + return new InetSocketAddress(host, port); + } + + public String getUsername() { + return this.username; + } + + public String getPassword() { + return this.password; + } + + public boolean isAuthenticated() { + return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final ProxySettings that = (ProxySettings) o; + return port == that.port + && type == that.type + && Objects.equals(host, that.host) + && Objects.equals(username, that.username) + && Objects.equals(password, that.password); + } + + @Override + public int hashCode() { + return Objects.hash(type, host, username, password, port); + } +} diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index a6e2299f52f33..48af969b04dc3 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ 
b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -40,4 +40,7 @@ grant { // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + + // gcs client set Authenticator for proxy username/password + permission java.net.NetPermission "setDefaultAuthenticator"; }; diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index 8dbf6b0ff2873..abf63e5525d4d 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -38,9 +38,13 @@ import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.Proxy; import java.net.URI; import java.nio.charset.StandardCharsets; import java.security.KeyPair; @@ -92,6 +96,7 @@ public void testLoad() throws Exception { assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout()); assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); + assertEquals(ProxySettings.NO_PROXY_SETTINGS, actualClientSettings.getProxySettings()); } if (deprecationWarnings.isEmpty() == false) { @@ -118,11 +123,131 @@ public void testProjectIdDefaultsToCredentials() throws Exception { 
CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY), READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY), APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY), - new URI("") + new URI(""), + new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null) ); assertEquals(credential.getProjectId(), googleCloudStorageClientSettings.getProjectId()); } + public void testHttpProxySettings() throws Exception { + final int port = randomIntBetween(10, 1080); + final String userName = randomAlphaOfLength(10); + final String password = randomAlphaOfLength(10); + final GoogleCloudStorageClientSettings gcsWithHttpProxyWithoutUserPwd = proxyGoogleCloudStorageClientSettings( + new ProxySettings(Proxy.Type.HTTP, InetAddress.getByName("127.0.0.10"), port, null, null) + ); + + assertEquals(Proxy.Type.HTTP, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType()); + assertEquals( + new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), + gcsWithHttpProxyWithoutUserPwd.getProxySettings().getAddress() + ); + assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getUsername()); + assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getPassword()); + assertFalse(gcsWithHttpProxyWithoutUserPwd.getProxySettings().isAuthenticated()); + + final GoogleCloudStorageClientSettings gcsWithHttpProxyWithUserPwd = proxyGoogleCloudStorageClientSettings( + new ProxySettings(Proxy.Type.HTTP, InetAddress.getByName("127.0.0.10"), port, userName, password) + ); + + assertEquals(Proxy.Type.HTTP, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType()); + assertEquals( + new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), + gcsWithHttpProxyWithUserPwd.getProxySettings().getAddress() + ); + assertTrue(gcsWithHttpProxyWithUserPwd.getProxySettings().isAuthenticated()); + assertEquals(userName, gcsWithHttpProxyWithUserPwd.getProxySettings().getUsername()); + assertEquals(password, gcsWithHttpProxyWithUserPwd.getProxySettings().getPassword()); + } + + public void 
testSocksProxySettings() throws Exception { + final int port = randomIntBetween(10, 1080); + final String userName = randomAlphaOfLength(10); + final String password = randomAlphaOfLength(10); + final GoogleCloudStorageClientSettings gcsWithHttpProxyWithoutUserPwd = proxyGoogleCloudStorageClientSettings( + new ProxySettings(Proxy.Type.SOCKS, InetAddress.getByName("127.0.0.10"), port, null, null) + ); + + assertEquals(Proxy.Type.SOCKS, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType()); + assertEquals( + new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), + gcsWithHttpProxyWithoutUserPwd.getProxySettings().getAddress() + ); + assertFalse(gcsWithHttpProxyWithoutUserPwd.getProxySettings().isAuthenticated()); + assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getUsername()); + assertNull(gcsWithHttpProxyWithoutUserPwd.getProxySettings().getPassword()); + + final GoogleCloudStorageClientSettings gcsWithHttpProxyWithUserPwd = proxyGoogleCloudStorageClientSettings( + new ProxySettings(Proxy.Type.SOCKS, InetAddress.getByName("127.0.0.10"), port, userName, password) + ); + + assertEquals(Proxy.Type.SOCKS, gcsWithHttpProxyWithoutUserPwd.getProxySettings().getType()); + assertEquals( + new InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), + gcsWithHttpProxyWithUserPwd.getProxySettings().getAddress() + ); + assertTrue(gcsWithHttpProxyWithUserPwd.getProxySettings().isAuthenticated()); + assertEquals(userName, gcsWithHttpProxyWithUserPwd.getProxySettings().getUsername()); + assertEquals(password, gcsWithHttpProxyWithUserPwd.getProxySettings().getPassword()); + } + + public void testProxyWrongHost() { + final Settings settings = Settings.builder() + .put("gcs.client.default.proxy.type", randomFrom("socks", "http")) + .put("gcs.client.default.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") + .put("gcs.client.default.proxy.port", 8080) + .build(); + final SettingsException e = expectThrows(SettingsException.class, () -> 
GoogleCloudStorageClientSettings.load(settings)); + assertEquals("Google Cloud Storage proxy host is unknown.", e.getMessage()); + } + + public void testProxyTypeNotSet() { + final Settings hostPortSettings = Settings.builder() + .put("gcs.client.default.proxy.host", "127.0.0.1") + .put("gcs.client.default.proxy.port", 8080) + .build(); + + SettingsException e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(hostPortSettings)); + assertEquals( + "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined.", + e.getMessage() + ); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("gcs.client.default.proxy.username", "aaaa"); + secureSettings.setString("gcs.client.default.proxy.password", "bbbb"); + final Settings usernamePasswordSettings = Settings.builder().setSecureSettings(secureSettings).build(); + + e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(usernamePasswordSettings)); + assertEquals( + "Google Cloud Storage proxy port or host or username or password have been set but proxy type is not defined.", + e.getMessage() + ); + } + + public void testProxyHostNotSet() { + final Settings settings = Settings.builder() + .put("gcs.client.default.proxy.port", 8080) + .put("gcs.client.default.proxy.type", randomFrom("socks", "http")) + .build(); + final SettingsException e = expectThrows(SettingsException.class, () -> GoogleCloudStorageClientSettings.load(settings)); + assertEquals("Google Cloud Storage proxy type has been set but proxy host or port is not defined.", e.getMessage()); + } + + private GoogleCloudStorageClientSettings proxyGoogleCloudStorageClientSettings(final ProxySettings proxySettings) throws Exception { + final String clientName = randomAlphaOfLength(5); + return new GoogleCloudStorageClientSettings( + randomCredential(clientName).v1(), + ENDPOINT_SETTING.getDefault(Settings.EMPTY), + 
PROJECT_ID_SETTING.getDefault(Settings.EMPTY), + CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY), + READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY), + APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY), + new URI(""), + proxySettings + ); + } + /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ private Tuple, Settings> randomClients( final int nbClients, @@ -216,7 +341,8 @@ private static GoogleCloudStorageClientSettings randomClient( connectTimeout, readTimeout, applicationName, - new URI("") + new URI(""), + new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null) ); } diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 7792a5f51c459..c5a3a26be082f 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -35,7 +35,7 @@ import com.google.auth.Credentials; import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; - +import org.hamcrest.Matchers; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Setting; @@ -43,7 +43,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; -import org.hamcrest.Matchers; import java.security.KeyPair; import java.security.KeyPairGenerator; @@ -51,9 +50,9 @@ import java.util.Locale; import java.util.UUID; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.containsString; +import static 
org.hamcrest.Matchers.equalTo; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; public class GoogleCloudStorageServiceTests extends OpenSearchTestCase { diff --git a/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml b/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml index dfd0ecc5788b1..f087a004efdf2 100644 --- a/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml +++ b/plugins/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml @@ -48,17 +48,14 @@ setup: body: - index: _index: docs - _type: doc _id: 1 - snapshot: one - index: _index: docs - _type: doc _id: 2 - snapshot: one - index: _index: docs - _type: doc _id: 3 - snapshot: one @@ -96,22 +93,18 @@ setup: body: - index: _index: docs - _type: doc _id: 4 - snapshot: two - index: _index: docs - _type: doc _id: 5 - snapshot: two - index: _index: docs - _type: doc _id: 6 - snapshot: two - index: _index: docs - _type: doc _id: 7 - snapshot: two diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 3cfe70b6d89a1..c9c7c8e6ffced 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -65,7 +65,7 @@ dependencies { api "org.apache.logging.log4j:log4j-core:${versions.log4j}" api 'org.apache.avro:avro:1.10.2' api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" - api 'com.google.code.gson:gson:2.8.9' + api 'com.google.code.gson:gson:2.9.0' runtimeOnly 'com.google.guava:guava:30.1.1-jre' api 'com.google.protobuf:protobuf-java:3.19.3' api 'commons-logging:commons-logging:1.1.3' @@ -74,7 +74,7 @@ dependencies { api 'commons-collections:commons-collections:3.2.2' api 'org.apache.commons:commons-compress:1.21' api 'org.apache.commons:commons-configuration2:2.7' - api 'commons-io:commons-io:2.7' + api 
'commons-io:commons-io:2.11.0' api 'org.apache.commons:commons-lang3:3.7' implementation 'com.google.re2j:re2j:1.1' api 'javax.servlet:servlet-api:2.5' @@ -83,7 +83,7 @@ dependencies { api 'net.minidev:json-smart:2.4.7' api 'org.apache.zookeeper:zookeeper:3.7.0' api "io.netty:netty-all:${versions.netty}" - implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' + implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8' implementation 'org.codehaus.woodstox:stax2-api:4.2.1' hdfsFixture project(':test:fixtures:hdfs-fixture') diff --git a/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1 new file mode 100644 index 0000000000000..8adec30bade49 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-2.11.0.jar.sha1 @@ -0,0 +1 @@ +a2503f302b11ebde7ebc3df41daebe0e4eea3689 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1 deleted file mode 100644 index bbb1b15dd1e1e..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-io-2.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f2bd4ba11c4162733c13cc90ca7c7ea09967102 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 b/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 deleted file mode 100644 index f7a8108d8c8e6..0000000000000 --- a/plugins/repository-hdfs/licenses/gson-2.8.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8a432c1d6825781e21a02db2e2c33c5fde2833b9 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 b/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 new file mode 100644 index 0000000000000..8e9626b0c949b --- /dev/null +++ b/plugins/repository-hdfs/licenses/gson-2.9.0.jar.sha1 @@ -0,0 +1 @@ +8a1167e089096758b49f9b34066ef98b2f4b37aa \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1 
b/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1 deleted file mode 100644 index f2ad1c80882d3..0000000000000 --- a/plugins/repository-hdfs/licenses/woodstox-core-6.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -989bb31963ed1758b95c7c4381a91592a9a8df61 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1 b/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1 new file mode 100644 index 0000000000000..ae65cdebf26de --- /dev/null +++ b/plugins/repository-hdfs/licenses/woodstox-core-6.2.8.jar.sha1 @@ -0,0 +1 @@ +670748292899c53b1963730d9eb7f8ab71314e90 \ No newline at end of file diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java index 02350499b1466..46d97f41b604f 100644 --- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java @@ -88,9 +88,9 @@ public void testSimpleWorkflow() { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(100L)); @@ -117,13 +117,13 @@ public void testSimpleWorkflow() { logger.info("--> delete some data"); for 
(int i = 0; i < 50; i++) { - client.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-1", Integer.toString(i)).get(); } for (int i = 50; i < 100; i++) { - client.prepareDelete("test-idx-2", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-2", Integer.toString(i)).get(); } for (int i = 0; i < 100; i += 2) { - client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get(); + client.prepareDelete("test-idx-3", Integer.toString(i)).get(); } client().admin().indices().prepareRefresh().get(); assertThat(count(client, "test-idx-1"), equalTo(50L)); diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 1ebd6c5c50ffe..c5939958c816a 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -64,8 +64,8 @@ dependencies { api "joda-time:joda-time:${versions.joda}" // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, - // and whitelist this hack in JarHell - api 'javax.xml.bind:jaxb-api:2.2.2' + // and allowlist this hack in JarHell + api 'javax.xml.bind:jaxb-api:2.3.1' testImplementation project(':test:fixtures:s3-fixture') } diff --git a/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 deleted file mode 100644 index a37e187238933..0000000000000 --- a/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aeb3021ca93dde265796d82015beecdcff95bf09 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1 b/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1 new file mode 100644 index 0000000000000..f4434214e1eec --- /dev/null +++ b/plugins/repository-s3/licenses/jaxb-api-2.3.1.jar.sha1 @@ -0,0 +1 @@ +8531ad5ac454cc2deb9d4d32c40c4d7451939b5d \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java 
b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java index 239918206f397..62e415705a011 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java @@ -32,45 +32,17 @@ package org.opensearch.repositories.s3; -import org.opensearch.common.util.concurrent.AbstractRefCounted; - import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; - -import org.opensearch.common.lease.Releasable; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped {@link AmazonS3Client} using reference * counting. */ -public class AmazonS3Reference extends AbstractRefCounted implements Releasable { - - private final AmazonS3 client; +public class AmazonS3Reference extends RefCountedReleasable { AmazonS3Reference(AmazonS3 client) { - super("AWS_S3_CLIENT"); - this.client = client; - } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); + super("AWS_S3_CLIENT", client, client::shutdown); } - - /** - * Returns the underlying `AmazonS3` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. 
- */ - public AmazonS3 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java new file mode 100644 index 0000000000000..430af0096d8b5 --- /dev/null +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/ProxySettings.java @@ -0,0 +1,123 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.repositories.s3; + +import com.amazonaws.Protocol; +import org.opensearch.common.Strings; +import org.opensearch.common.settings.SettingsException; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Objects; + +public class ProxySettings { + public static final ProxySettings NO_PROXY_SETTINGS = new ProxySettings(ProxyType.DIRECT, null, -1, null, null); + + public static enum ProxyType { + HTTP(Protocol.HTTP.name()), + HTTPS(Protocol.HTTPS.name()), + SOCKS("SOCKS"), + DIRECT("DIRECT"); + + private final String name; + + private ProxyType(String name) { + this.name = name; + } + + public Protocol toProtocol() { + if (this == DIRECT) { + // We check it in settings, + // the probability that it could be thrown is small, but how knows + throw new SettingsException("Couldn't convert to S3 protocol"); + } else if (this == SOCKS) { + throw new SettingsException("Couldn't convert to S3 protocol. 
SOCKS is not supported"); + } + return Protocol.valueOf(name()); + } + + } + + private final ProxyType type; + + private final String host; + + private final String username; + + private final String password; + + private final int port; + + public String getHost() { + return host; + } + + public ProxySettings(final ProxyType type, final String host, final int port, final String username, final String password) { + this.type = type; + this.host = host; + this.port = port; + this.username = username; + this.password = password; + } + + public ProxyType getType() { + return this.type; + } + + public String getHostName() { + return host; + } + + public int getPort() { + return port; + } + + public InetSocketAddress getAddress() { + try { + return new InetSocketAddress(InetAddress.getByName(host), port); + } catch (UnknownHostException e) { + // this error won't be thrown since validation of the host name is in the S3ClientSettings + throw new RuntimeException(e); + } + } + + public String getUsername() { + return this.username; + } + + public String getPassword() { + return this.password; + } + + public boolean isAuthenticated() { + return Strings.isNullOrEmpty(username) == false && Strings.isNullOrEmpty(password) == false; + } + + public ProxySettings recreateWithNewHostAndPort(final String host, final int port) { + return new ProxySettings(type, host, port, username, password); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final ProxySettings that = (ProxySettings) o; + return port == that.port + && type == that.type + && Objects.equals(host, that.host) + && Objects.equals(username, that.username) + && Objects.equals(password, that.password); + } + + @Override + public int hashCode() { + return Objects.hash(type, host, username, password, port); + } +} diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java 
b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 5a9c03c0b2a37..678be7c6f13f2 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -101,7 +101,7 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); + return SocketAccess.doPrivileged(() -> clientReference.get().doesObjectExist(blobStore.bucket(), buildKey(blobName))); } catch (final Exception e) { throw new BlobStoreException("Failed to check if blob [" + blobName + "] exists", e); } @@ -169,13 +169,13 @@ public DeleteResult delete() throws IOException { ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { final ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(blobStore.bucket()); listObjectsRequest.setPrefix(keyPath); listObjectsRequest.setRequestMetricCollector(blobStore.listMetricCollector); - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } final List blobsToDelete = new ArrayList<>(); list.getObjectSummaries().forEach(s3ObjectSummary -> { @@ -236,7 +236,7 @@ private void doDeleteBlobs(List blobNames, boolean relative) throws IOEx .map(DeleteObjectsRequest.KeyVersion::getKey) .collect(Collectors.toList()); try { - 
clientReference.client().deleteObjects(deleteRequest); + clientReference.get().deleteObjects(deleteRequest); outstanding.removeAll(keysInRequest); } catch (MultiObjectDeleteException e) { // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead @@ -324,9 +324,9 @@ private static List executeListing(AmazonS3Reference clientRefere ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } results.add(list); if (list.isTruncated()) { @@ -374,7 +374,7 @@ void executeSingleUpload(final S3BlobStore blobStore, final String blobName, fin putRequest.setRequestMetricCollector(blobStore.putMetricCollector); try (AmazonS3Reference clientReference = blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> { clientReference.client().putObject(putRequest); }); + SocketAccess.doPrivilegedVoid(() -> { clientReference.get().putObject(putRequest); }); } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); } @@ -413,7 +413,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, } try (AmazonS3Reference clientReference = blobStore.clientReference()) { - uploadId.set(SocketAccess.doPrivileged(() -> clientReference.client().initiateMultipartUpload(initRequest).getUploadId())); + uploadId.set(SocketAccess.doPrivileged(() -> clientReference.get().initiateMultipartUpload(initRequest).getUploadId())); if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed 
to initialize multipart upload " + blobName); } @@ -439,7 +439,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, } bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); + final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.get().uploadPart(uploadRequest)); parts.add(uploadResponse.getPartETag()); } @@ -456,7 +456,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, parts ); complRequest.setRequestMetricCollector(blobStore.multiPartUploadMetricCollector); - SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().completeMultipartUpload(complRequest)); success = true; } catch (final AmazonClientException e) { @@ -465,7 +465,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, if ((success == false) && Strings.hasLength(uploadId.get())) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); try (AmazonS3Reference clientReference = blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().abortMultipartUpload(abortRequest)); } } } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java index 805f48aae9b2d..e02c7cae89378 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java @@ -34,13 +34,18 @@ import com.amazonaws.ClientConfiguration; import 
com.amazonaws.Protocol; +import org.opensearch.common.Strings; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.SecureSetting; import org.opensearch.common.settings.SecureString; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; +import java.net.InetAddress; +import java.net.UnknownHostException; import java.util.Collections; import java.util.HashMap; import java.util.Locale; @@ -54,6 +59,8 @@ */ final class S3ClientSettings { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(S3ClientSettings.class); + // prefix for s3 client settings private static final String PREFIX = "s3.client."; @@ -95,6 +102,13 @@ final class S3ClientSettings { key -> new Setting<>(key, "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope) ); + /** The protocol to use to connect to s3. */ + static final Setting.AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting( + PREFIX, + "proxy.type", + key -> new Setting<>(key, "direct", s -> ProxySettings.ProxyType.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope) + ); + /** The host name of a proxy to connect to s3 through. */ static final Setting.AffixSetting PROXY_HOST_SETTING = Setting.affixKeySetting( PREFIX, @@ -106,7 +120,7 @@ final class S3ClientSettings { static final Setting.AffixSetting PROXY_PORT_SETTING = Setting.affixKeySetting( PREFIX, "proxy.port", - key -> Setting.intSetting(key, 80, 0, 1 << 16, Property.NodeScope) + key -> Setting.intSetting(key, 80, 0, (1 << 16) - 1, Property.NodeScope) ); /** The username of a proxy to connect to s3 through. */ @@ -181,19 +195,8 @@ final class S3ClientSettings { /** The protocol to use to talk to s3. Defaults to https. 
*/ final Protocol protocol; - /** An optional proxy host that requests to s3 should be made through. */ - final String proxyHost; - - /** The port number the proxy host should be connected on. */ - final int proxyPort; - - // these should be "secure" yet the api for the s3 client only takes String, so storing them - // as SecureString here won't really help with anything - /** An optional username for the proxy host, for basic authentication. */ - final String proxyUsername; - - /** An optional password for the proxy host, for basic authentication. */ - final String proxyPassword; + /** An optional proxy settings that requests to s3 should be made through. */ + final ProxySettings proxySettings; /** The read timeout for the s3 client. */ final int readTimeoutMillis; @@ -220,25 +223,18 @@ private S3ClientSettings( S3BasicCredentials credentials, String endpoint, Protocol protocol, - String proxyHost, - int proxyPort, - String proxyUsername, - String proxyPassword, int readTimeoutMillis, int maxRetries, boolean throttleRetries, boolean pathStyleAccess, boolean disableChunkedEncoding, String region, - String signerOverride + String signerOverride, + ProxySettings proxySettings ) { this.credentials = credentials; this.endpoint = endpoint; this.protocol = protocol; - this.proxyHost = proxyHost; - this.proxyPort = proxyPort; - this.proxyUsername = proxyUsername; - this.proxyPassword = proxyPassword; this.readTimeoutMillis = readTimeoutMillis; this.maxRetries = maxRetries; this.throttleRetries = throttleRetries; @@ -246,6 +242,7 @@ private S3ClientSettings( this.disableChunkedEncoding = disableChunkedEncoding; this.region = region; this.signerOverride = signerOverride; + this.proxySettings = proxySettings; } /** @@ -263,8 +260,10 @@ S3ClientSettings refine(Settings repositorySettings) { final String newEndpoint = getRepoSettingOrDefault(ENDPOINT_SETTING, normalizedSettings, endpoint); final Protocol newProtocol = getRepoSettingOrDefault(PROTOCOL_SETTING, 
normalizedSettings, protocol); - final String newProxyHost = getRepoSettingOrDefault(PROXY_HOST_SETTING, normalizedSettings, proxyHost); - final int newProxyPort = getRepoSettingOrDefault(PROXY_PORT_SETTING, normalizedSettings, proxyPort); + + final String newProxyHost = getRepoSettingOrDefault(PROXY_HOST_SETTING, normalizedSettings, proxySettings.getHostName()); + final int newProxyPort = getRepoSettingOrDefault(PROXY_PORT_SETTING, normalizedSettings, proxySettings.getPort()); + final int newReadTimeoutMillis = Math.toIntExact( getRepoSettingOrDefault(READ_TIMEOUT_SETTING, normalizedSettings, TimeValue.timeValueMillis(readTimeoutMillis)).millis() ); @@ -286,8 +285,8 @@ S3ClientSettings refine(Settings repositorySettings) { final String newSignerOverride = getRepoSettingOrDefault(SIGNER_OVERRIDE, normalizedSettings, signerOverride); if (Objects.equals(endpoint, newEndpoint) && protocol == newProtocol - && Objects.equals(proxyHost, newProxyHost) - && proxyPort == newProxyPort + && Objects.equals(proxySettings.getHostName(), newProxyHost) + && proxySettings.getPort() == newProxyPort && newReadTimeoutMillis == readTimeoutMillis && maxRetries == newMaxRetries && newThrottleRetries == throttleRetries @@ -298,21 +297,20 @@ S3ClientSettings refine(Settings repositorySettings) { && Objects.equals(signerOverride, newSignerOverride)) { return this; } + + validateInetAddressFor(newProxyHost); return new S3ClientSettings( newCredentials, newEndpoint, newProtocol, - newProxyHost, - newProxyPort, - proxyUsername, - proxyPassword, newReadTimeoutMillis, newMaxRetries, newThrottleRetries, newPathStyleAccess, newDisableChunkedEncoding, newRegion, - newSignerOverride + newSignerOverride, + proxySettings.recreateWithNewHostAndPort(newProxyHost, newProxyPort) ); } @@ -401,27 +399,69 @@ private static S3BasicCredentials loadCredentials(Settings settings, String clie // pkg private for tests /** Parse settings for a single client. 
*/ static S3ClientSettings getClientSettings(final Settings settings, final String clientName) { - try ( - SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); - SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING) - ) { - return new S3ClientSettings( - S3ClientSettings.loadCredentials(settings, clientName), - getConfigValue(settings, clientName, ENDPOINT_SETTING), - getConfigValue(settings, clientName, PROTOCOL_SETTING), - getConfigValue(settings, clientName, PROXY_HOST_SETTING), - getConfigValue(settings, clientName, PROXY_PORT_SETTING), - proxyUsername.toString(), - proxyPassword.toString(), - Math.toIntExact(getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()), - getConfigValue(settings, clientName, MAX_RETRIES_SETTING), - getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING), - getConfigValue(settings, clientName, USE_PATH_STYLE_ACCESS), - getConfigValue(settings, clientName, DISABLE_CHUNKED_ENCODING), - getConfigValue(settings, clientName, REGION), - getConfigValue(settings, clientName, SIGNER_OVERRIDE) + final Protocol awsProtocol = getConfigValue(settings, clientName, PROTOCOL_SETTING); + return new S3ClientSettings( + S3ClientSettings.loadCredentials(settings, clientName), + getConfigValue(settings, clientName, ENDPOINT_SETTING), + awsProtocol, + Math.toIntExact(getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()), + getConfigValue(settings, clientName, MAX_RETRIES_SETTING), + getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING), + getConfigValue(settings, clientName, USE_PATH_STYLE_ACCESS), + getConfigValue(settings, clientName, DISABLE_CHUNKED_ENCODING), + getConfigValue(settings, clientName, REGION), + getConfigValue(settings, clientName, SIGNER_OVERRIDE), + validateAndCreateProxySettings(settings, clientName, awsProtocol) + ); + } + + static ProxySettings validateAndCreateProxySettings(final Settings settings, final 
String clientName, final Protocol awsProtocol) { + ProxySettings.ProxyType proxyType = getConfigValue(settings, clientName, PROXY_TYPE_SETTING); + final String proxyHost = getConfigValue(settings, clientName, PROXY_HOST_SETTING); + final int proxyPort = getConfigValue(settings, clientName, PROXY_PORT_SETTING); + final SecureString proxyUserName = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + final SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING); + if (awsProtocol != Protocol.HTTPS && proxyType == ProxySettings.ProxyType.DIRECT && Strings.hasText(proxyHost)) { + // This is backward compatibility for the current behaviour. + // The default value for Protocol settings is HTTPS, + // The expectation of ex-developers that protocol is the same as the proxy protocol + // which is a separate setting for AWS SDK. + // In this case, proxy type should be the same as a protocol, + // when proxy host and port have been set + proxyType = ProxySettings.ProxyType.valueOf(awsProtocol.name()); + deprecationLogger.deprecate( + PROTOCOL_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + "Using of " + + PROTOCOL_SETTING.getConcreteSettingForNamespace(clientName).getKey() + + " as proxy type is deprecated and will be removed in future releases. Please use " + + PROXY_TYPE_SETTING.getConcreteSettingForNamespace(clientName).getKey() + + " instead to specify proxy type." 
); } + // Validate proxy settings + if (proxyType == ProxySettings.ProxyType.DIRECT + && (proxyPort != 80 || Strings.hasText(proxyHost) || Strings.hasText(proxyUserName) || Strings.hasText(proxyPassword))) { + throw new SettingsException("S3 proxy port or host or username or password have been set but proxy type is not defined."); + } + if (proxyType != ProxySettings.ProxyType.DIRECT && Strings.isEmpty(proxyHost)) { + throw new SettingsException("S3 proxy type has been set but proxy host or port is not defined."); + } + if (proxyType == ProxySettings.ProxyType.DIRECT) { + return ProxySettings.NO_PROXY_SETTINGS; + } + if (awsProtocol == Protocol.HTTP && proxyType == ProxySettings.ProxyType.SOCKS) { + throw new SettingsException("SOCKS proxy is not supported for HTTP protocol"); + } + validateInetAddressFor(proxyHost); + return new ProxySettings(proxyType, proxyHost, proxyPort, proxyUserName.toString(), proxyPassword.toString()); + } + + static void validateInetAddressFor(final String proxyHost) { + try { + InetAddress.getByName(proxyHost); + } catch (final UnknownHostException e) { + throw new SettingsException("S3 proxy host is unknown.", e); + } } @Override @@ -433,16 +473,13 @@ public boolean equals(final Object o) { return false; } final S3ClientSettings that = (S3ClientSettings) o; - return proxyPort == that.proxyPort - && readTimeoutMillis == that.readTimeoutMillis + return readTimeoutMillis == that.readTimeoutMillis && maxRetries == that.maxRetries && throttleRetries == that.throttleRetries && Objects.equals(credentials, that.credentials) && Objects.equals(endpoint, that.endpoint) && protocol == that.protocol - && Objects.equals(proxyHost, that.proxyHost) - && Objects.equals(proxyUsername, that.proxyUsername) - && Objects.equals(proxyPassword, that.proxyPassword) + && proxySettings.equals(that.proxySettings) && Objects.equals(disableChunkedEncoding, that.disableChunkedEncoding) && Objects.equals(region, that.region) && Objects.equals(signerOverride, 
that.signerOverride); @@ -454,10 +491,7 @@ public int hashCode() { credentials, endpoint, protocol, - proxyHost, - proxyPort, - proxyUsername, - proxyPassword, + proxySettings, readTimeoutMillis, maxRetries, throttleRetries, diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java index 82c3367679c53..388f5b8d74a2b 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java @@ -110,7 +110,7 @@ private void openStream() throws IOException { + end; getObjectRequest.setRange(Math.addExact(start, currentOffset), end); } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.get().getObject(getObjectRequest)); this.currentStreamLastOffset = Math.addExact(Math.addExact(start, currentOffset), getStreamLength(s3Object)); this.currentStream = s3Object.getObjectContent(); } catch (final AmazonClientException e) { diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java index 1f5cb2a752eef..3ce19378ac05c 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java @@ -39,10 +39,16 @@ import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.http.IdleConnectionReaper; +import com.amazonaws.http.SystemPropertyTlsKeyManagersProvider; +import com.amazonaws.http.conn.ssl.SdkTLSSocketFactory; +import com.amazonaws.internal.SdkSSLContext; 
import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.internal.Constants; +import org.apache.http.conn.ssl.DefaultHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.protocol.HttpContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.cluster.metadata.RepositoryMetadata; @@ -50,7 +56,15 @@ import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.settings.Settings; +import javax.net.ssl.SSLContext; import java.io.Closeable; +import java.io.IOException; +import java.net.Authenticator; +import java.net.InetSocketAddress; +import java.net.PasswordAuthentication; +import java.net.Proxy; +import java.net.Socket; +import java.security.SecureRandom; import java.util.Map; import static java.util.Collections.emptyMap; @@ -189,12 +203,32 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { clientConfiguration.setResponseMetadataCacheSize(0); clientConfiguration.setProtocol(clientSettings.protocol); - if (Strings.hasText(clientSettings.proxyHost)) { - // TODO: remove this leniency, these settings should exist together and be validated - clientConfiguration.setProxyHost(clientSettings.proxyHost); - clientConfiguration.setProxyPort(clientSettings.proxyPort); - clientConfiguration.setProxyUsername(clientSettings.proxyUsername); - clientConfiguration.setProxyPassword(clientSettings.proxyPassword); + if (clientSettings.proxySettings != ProxySettings.NO_PROXY_SETTINGS) { + if (clientSettings.proxySettings.getType() == ProxySettings.ProxyType.SOCKS) { + SocketAccess.doPrivilegedVoid(() -> { + if (clientSettings.proxySettings.isAuthenticated()) { + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication( + 
clientSettings.proxySettings.getUsername(), + clientSettings.proxySettings.getPassword().toCharArray() + ); + } + }); + } + clientConfiguration.getApacheHttpClientConfig() + .setSslSocketFactory(createSocksSslConnectionSocketFactory(clientSettings.proxySettings.getAddress())); + }); + } else { + if (clientSettings.proxySettings.getType() != ProxySettings.ProxyType.DIRECT) { + clientConfiguration.setProxyProtocol(clientSettings.proxySettings.getType().toProtocol()); + } + clientConfiguration.setProxyHost(clientSettings.proxySettings.getHostName()); + clientConfiguration.setProxyPort(clientSettings.proxySettings.getPort()); + clientConfiguration.setProxyUsername(clientSettings.proxySettings.getUsername()); + clientConfiguration.setProxyPassword(clientSettings.proxySettings.getPassword()); + } } if (Strings.hasLength(clientSettings.signerOverride)) { @@ -208,6 +242,20 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { return clientConfiguration; } + private static SSLConnectionSocketFactory createSocksSslConnectionSocketFactory(final InetSocketAddress address) { + // This part was taken from AWS settings + final SSLContext sslCtx = SdkSSLContext.getPreferredSSLContext( + new SystemPropertyTlsKeyManagersProvider().getKeyManagers(), + new SecureRandom() + ); + return new SdkTLSSocketFactory(sslCtx, new DefaultHostnameVerifier()) { + @Override + public Socket createSocket(final HttpContext ctx) throws IOException { + return new Socket(new Proxy(Proxy.Type.SOCKS, address)); + } + }; + } + // pkg private for tests static AWSCredentialsProvider buildCredentials(Logger logger, S3ClientSettings clientSettings) { final S3BasicCredentials credentials = clientSettings.credentials; diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index 8c9b91418ed53..f6c154bb3b14d 100644 --- 
a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -51,6 +51,9 @@ grant { // s3 client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + // s3 client set Authenticator for proxy username/password + permission java.net.NetPermission "setDefaultAuthenticator"; + // only for tests : org.opensearch.repositories.s3.S3RepositoryPlugin permission java.util.PropertyPermission "opensearch.allow_insecure_settings", "read,write"; }; diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java index 0f1bfdf7b7d6b..38d9ebf337731 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/AwsS3ServiceImplTests.java @@ -36,17 +36,19 @@ import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSStaticCredentialsProvider; - import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchTestCase; +import java.io.IOException; import java.util.Locale; import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.opensearch.repositories.s3.S3ClientSettings.PROTOCOL_SETTING; +import static org.opensearch.repositories.s3.S3ClientSettings.PROXY_TYPE_SETTING; public class AwsS3ServiceImplTests extends OpenSearchTestCase { @@ -140,14 +142,14 @@ public void testAWSConfigurationWithAwsSettings() { final Settings settings = Settings.builder() .setSecureSettings(secureSettings) .put("s3.client.default.protocol", "http") - 
.put("s3.client.default.proxy.host", "aws_proxy_host") + .put("s3.client.default.proxy.host", "127.0.0.10") .put("s3.client.default.proxy.port", 8080) .put("s3.client.default.read_timeout", "10s") .build(); launchAWSConfigurationTest( settings, Protocol.HTTP, - "aws_proxy_host", + "127.0.0.10", 8080, "aws_proxy_username", "aws_proxy_password", @@ -155,6 +157,60 @@ public void testAWSConfigurationWithAwsSettings() { ClientConfiguration.DEFAULT_THROTTLE_RETRIES, 10000 ); + assertWarnings( + "Using of " + + PROTOCOL_SETTING.getConcreteSettingForNamespace("default").getKey() + + " as proxy type is deprecated and will be removed in future releases. Please use " + + PROXY_TYPE_SETTING.getConcreteSettingForNamespace("default").getKey() + + " instead to specify proxy type." + ); + } + + public void testProxyTypeOverrideProtocolSettings() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); + secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); + final Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .put("s3.client.default.protocol", "http") + .put("s3.client.default.proxy.type", "https") + .put("s3.client.default.proxy.host", "127.0.0.10") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.read_timeout", "10s") + .build(); + launchAWSConfigurationTest( + settings, + Protocol.HTTP, + "127.0.0.10", + 8080, + "aws_proxy_username", + "aws_proxy_password", + 3, + ClientConfiguration.DEFAULT_THROTTLE_RETRIES, + 10000 + ); + } + + public void testSocksProxyConfiguration() throws IOException { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); + secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); + final Settings settings = Settings.builder() + 
.setSecureSettings(secureSettings) + .put("s3.client.default.proxy.type", "socks") + .put("s3.client.default.proxy.host", "127.0.0.10") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.read_timeout", "10s") + .build(); + + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); + final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings); + + assertEquals(Protocol.HTTPS, configuration.getProtocol()); + assertEquals(Protocol.HTTP, configuration.getProxyProtocol()); // default value in SDK + assertEquals(-1, configuration.getProxyPort()); + assertNull(configuration.getProxyUsername()); + assertNull(configuration.getProxyPassword()); } public void testRepositoryMaxRetries() { diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java index 645fe5cf1d134..9c359d67db88b 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java @@ -123,7 +123,7 @@ public void testRepositoryCredentialsOverrideSecureCredentials() { assertThat(repositories.repository(repositoryName), instanceOf(S3Repository.class)); final S3Repository repository = (S3Repository) repositories.repository(repositoryName); - final AmazonS3 client = repository.createBlobStore().clientReference().client(); + final AmazonS3 client = repository.createBlobStore().clientReference().get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -162,7 +162,7 @@ public void testReinitSecureCredentials() { final S3Repository repository = (S3Repository) 
repositories.repository(repositoryName); try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -202,7 +202,7 @@ public void testReinitSecureCredentials() { // check credentials have been updated try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials newCredentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java index ea0b554df880e..462ed5377ff9a 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java @@ -37,8 +37,12 @@ import com.amazonaws.services.s3.AmazonS3Client; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsException; import org.opensearch.test.OpenSearchTestCase; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.Locale; import java.util.Map; import static org.hamcrest.Matchers.contains; @@ -55,10 +59,7 @@ public void testThereIsADefaultClientByDefault() { assertThat(defaultSettings.credentials, nullValue()); 
assertThat(defaultSettings.endpoint, is(emptyString())); assertThat(defaultSettings.protocol, is(Protocol.HTTPS)); - assertThat(defaultSettings.proxyHost, is(emptyString())); - assertThat(defaultSettings.proxyPort, is(80)); - assertThat(defaultSettings.proxyUsername, is(emptyString())); - assertThat(defaultSettings.proxyPassword, is(emptyString())); + assertThat(defaultSettings.proxySettings, is(ProxySettings.NO_PROXY_SETTINGS)); assertThat(defaultSettings.readTimeoutMillis, is(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT)); assertThat(defaultSettings.maxRetries, is(ClientConfiguration.DEFAULT_RETRY_POLICY.getMaxErrorRetry())); assertThat(defaultSettings.throttleRetries, is(ClientConfiguration.DEFAULT_THROTTLE_RETRIES)); @@ -215,4 +216,77 @@ public void testSignerOverrideCanBeSet() { ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); assertThat(configuration.getSignerOverride(), is(signerOverride)); } + + public void testSetProxySettings() throws Exception { + final int port = randomIntBetween(10, 1080); + final String userName = randomAlphaOfLength(10); + final String password = randomAlphaOfLength(10); + final String proxyType = randomFrom("http", "https", "socks"); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", userName); + secureSettings.setString("s3.client.default.proxy.password", password); + + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.type", proxyType) + .put("s3.client.default.proxy.host", randomFrom("127.0.0.10")) + .put("s3.client.default.proxy.port", randomFrom(port)) + .setSecureSettings(secureSettings) + .build(); + + final S3ClientSettings s3ClientSettings = S3ClientSettings.load(settings).get("default"); + + assertEquals(ProxySettings.ProxyType.valueOf(proxyType.toUpperCase(Locale.ROOT)), s3ClientSettings.proxySettings.getType()); + assertEquals(new 
InetSocketAddress(InetAddress.getByName("127.0.0.10"), port), s3ClientSettings.proxySettings.getAddress()); + assertEquals(userName, s3ClientSettings.proxySettings.getUsername()); + assertEquals(password, s3ClientSettings.proxySettings.getPassword()); + } + + public void testProxyWrongHost() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.type", randomFrom("socks", "http")) + .put("s3.client.default.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") + .put("s3.client.default.proxy.port", 8080) + .build(); + final SettingsException e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + assertEquals("S3 proxy host is unknown.", e.getMessage()); + } + + public void testProxyTypeNotSet() { + final Settings hostPortSettings = Settings.builder() + .put("s3.client.default.proxy.host", "127.0.0.1") + .put("s3.client.default.proxy.port", 8080) + .build(); + + SettingsException e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(hostPortSettings)); + assertEquals("S3 proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage()); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.proxy.username", "aaaa"); + secureSettings.setString("s3.client.default.proxy.password", "bbbb"); + final Settings usernamePasswordSettings = Settings.builder().setSecureSettings(secureSettings).build(); + + e = expectThrows(SettingsException.class, () -> S3ClientSettings.load(usernamePasswordSettings)); + assertEquals("S3 proxy port or host or username or password have been set but proxy type is not defined.", e.getMessage()); + } + + public void testProxyHostNotSet() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.proxy.type", randomFrom("socks", "http", "https")) + .build(); + final SettingsException e = 
expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + assertEquals("S3 proxy type has been set but proxy host or port is not defined.", e.getMessage()); + } + + public void testSocksDoesNotSupportForHttpProtocol() { + final Settings settings = Settings.builder() + .put("s3.client.default.proxy.host", "127.0.0.1") + .put("s3.client.default.proxy.port", 8080) + .put("s3.client.default.protocol", "http") + .put("s3.client.default.proxy.type", "socks") + .build(); + expectThrows(SettingsException.class, () -> S3ClientSettings.load(settings)); + } + } diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java index c7d1cb43bd266..0f40a7b3392e8 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java @@ -109,7 +109,7 @@ private S3RetryingInputStream createInputStream(final byte[] data, @Nullable fin final AmazonS3 client = mock(AmazonS3.class); when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); final AmazonS3Reference clientReference = mock(AmazonS3Reference.class); - when(clientReference.client()).thenReturn(client); + when(clientReference.get()).thenReturn(client); final S3BlobStore blobStore = mock(S3BlobStore.class); when(blobStore.clientReference()).thenReturn(clientReference); diff --git a/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml b/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml index 0ec3d272ee02d..a0c2d2e593a47 100644 --- a/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml +++ 
b/plugins/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml @@ -131,17 +131,14 @@ setup: body: - index: _index: docs - _type: doc _id: 1 - snapshot: one - index: _index: docs - _type: doc _id: 2 - snapshot: one - index: _index: docs - _type: doc _id: 3 - snapshot: one diff --git a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml index 09e59c7fc9d9a..fbbdcb8f153e0 100644 --- a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml +++ b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml @@ -19,7 +19,6 @@ id: 1 - match: { _index: smb-test } - - match: { _type: _doc } - match: { _id: "1"} - match: { _version: 1} - match: { _source: { foo: bar }} diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 6e309e109247b..88355cdf22728 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -82,10 +82,7 @@ thirdPartyAudit { // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', - 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', - 'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', - 'org.bouncycastle.asn1.x500.X500Name', // from io.netty.handler.ssl.JettyNpnSslEngine (netty) 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', diff --git a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java index 1d705bce64852..561695c06effe 100644 --- 
a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/HttpReadWriteHandler.java @@ -199,8 +199,8 @@ private static boolean assertMessageTypes(Object message) { + ". Found type: " + message.getClass() + "."; - assert ((HttpPipelinedResponse) message) - .getDelegateRequest() instanceof NioHttpResponse : "This channel only pipelined responses with a delegate of type: " + assert ((HttpPipelinedResponse) message).getDelegateRequest() instanceof NioHttpResponse + : "This channel only pipelined responses with a delegate of type: " + NioHttpResponse.class + ". Found type: " + ((HttpPipelinedResponse) message).getDelegateRequest().getClass() diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 691115864de16..19dc72c0c784f 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -40,7 +40,7 @@ apply plugin: 'opensearch.testclusters' apply plugin: 'opensearch.standalone-test' dependencies { - testImplementation 'com.google.jimfs:jimfs:1.1' + testImplementation 'com.google.jimfs:jimfs:1.2' } // TODO: give each evil test its own fresh JVM for more isolation. 
@@ -59,8 +59,17 @@ thirdPartyAudit { 'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1' ) } diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index ee903dc0f2e59..a8302fdd6bc76 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -49,9 +49,6 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.IndexSettings; -import org.opensearch.rest.action.document.RestBulkAction; -import org.opensearch.rest.action.document.RestUpdateAction; -import org.opensearch.rest.action.search.RestExplainAction; import org.opensearch.test.NotEqualMessageBuilder; import org.opensearch.test.XContentTestUtils; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -96,6 +93,7 @@ * with {@code tests.is_old_cluster} set to {@code 
false}. */ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { + private String index; private String type; @@ -162,6 +160,7 @@ public void testSearch() throws Exception { count, true, true, + randomBoolean(), i -> JsonXContent.contentBuilder().startObject() .field("string", randomAlphaOfLength(10)) .field("int", randomInt(100)) @@ -181,7 +180,7 @@ public void testSearch() throws Exception { assertBasicSearchWorks(count); assertAllSearchWorks(count); assertBasicAggregationWorks(); - assertRealtimeGetWorks(type); + assertRealtimeGetWorks(); assertStoredBinaryFields(count); } @@ -197,9 +196,6 @@ public void testNewReplicasWork() throws Exception { } { mappingsAndSettings.startObject("mappings"); - if (isRunningAgainstAncientCluster()) { - mappingsAndSettings.startObject(type); - } mappingsAndSettings.startObject("properties"); { mappingsAndSettings.startObject("field"); @@ -207,21 +203,17 @@ public void testNewReplicasWork() throws Exception { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - if (isRunningAgainstAncientCluster()) { - mappingsAndSettings.endObject(); - } mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); - createIndex.setOptions(allowTypesRemovalWarnings()); client().performRequest(createIndex); int numDocs = randomIntBetween(2000, 3000); indexRandomDocuments( - numDocs, true, false, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()); + numDocs, true, false, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()); logger.info("Refreshing [{}]", index); client().performRequest(new Request("POST", "/" + index + "/_refresh")); } else { @@ -303,9 +295,6 @@ public void testShrink() throws IOException { { mappingsAndSettings.startObject("mappings"); { - if (isRunningAgainstAncientCluster()) { - 
mappingsAndSettings.startObject(type); - } mappingsAndSettings.startObject("properties"); { mappingsAndSettings.startObject("field"); @@ -315,30 +304,23 @@ public void testShrink() throws IOException { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - if (isRunningAgainstAncientCluster()) { - mappingsAndSettings.endObject(); - } } mappingsAndSettings.endObject(); - if (isRunningAgainstAncientCluster() == false) { - // the default number of shards is now one so we have to set the number of shards to be more than one explicitly - mappingsAndSettings.startObject("settings"); - { - mappingsAndSettings.field("index.number_of_shards", 5); - } - mappingsAndSettings.endObject(); + mappingsAndSettings.startObject("settings"); + { + mappingsAndSettings.field("index.number_of_shards", 5); } + mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); - createIndex.setOptions(allowTypesRemovalWarnings()); client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); indexRandomDocuments( - numDocs, true, true, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()); + numDocs, true, true, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()); ensureGreen(index); // wait for source index to be available on both nodes before starting shrink @@ -386,9 +368,6 @@ public void testShrinkAfterUpgrade() throws IOException { { mappingsAndSettings.startObject("mappings"); { - if (isRunningAgainstAncientCluster()) { - mappingsAndSettings.startObject(type); - } mappingsAndSettings.startObject("properties"); { mappingsAndSettings.startObject("field"); @@ -398,23 +377,17 @@ public void testShrinkAfterUpgrade() throws IOException { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - if (isRunningAgainstAncientCluster()) { - 
mappingsAndSettings.endObject(); - } } mappingsAndSettings.endObject(); - if (isRunningAgainstAncientCluster() == false) { - // the default number of shards is now one so we have to set the number of shards to be more than one explicitly - mappingsAndSettings.startObject("settings"); - mappingsAndSettings.field("index.number_of_shards", 5); - mappingsAndSettings.endObject(); - } + // the default number of shards is now one so we have to set the number of shards to be more than one explicitly + mappingsAndSettings.startObject("settings"); + mappingsAndSettings.field("index.number_of_shards", 5); + mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); - createIndex.setOptions(allowTypesRemovalWarnings()); client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); @@ -422,6 +395,7 @@ public void testShrinkAfterUpgrade() throws IOException { numDocs, true, true, + randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject() ); } else { @@ -490,15 +464,13 @@ public void testRollover() throws IOException { bulk.append("{\"index\":{}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - Request bulkRequest = new Request("POST", "/" + index + "_write/" + type + "/_bulk"); + Request bulkRequest = new Request("POST", "/" + index + "_write/_bulk"); bulkRequest.setJsonEntity(bulk.toString()); bulkRequest.addParameter("refresh", ""); - bulkRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false")); if (isRunningAgainstOldCluster()) { Request rolloverRequest = new Request("POST", "/" + index + "_write/_rollover"); - rolloverRequest.setOptions(allowTypesRemovalWarnings()); rolloverRequest.setJsonEntity("{" + " \"conditions\": {" + " \"max_docs\": 5" @@ -568,12 
+540,10 @@ void assertAllSearchWorks(int count) throws IOException { // the 'string' field has a boost of 4 in the mappings so it should get a payload boost String stringValue = (String) XContentMapValues.extractValue("_source.string", bestHit); assertNotNull(stringValue); - String type = (String) bestHit.get("_type"); String id = (String) bestHit.get("_id"); - Request explainRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain"); + Request explainRequest = new Request("GET", "/" + index + "/_explain" + "/" + id); explainRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}"); - explainRequest.setOptions(expectWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE)); String explanation = toStr(client().performRequest(explainRequest)); assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost")); @@ -613,7 +583,7 @@ void assertBasicAggregationWorks() throws IOException { assertTotalHits(termsCount, boolTerms); } - void assertRealtimeGetWorks(final String typeName) throws IOException { + void assertRealtimeGetWorks() throws IOException { Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings"); disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}"); client().performRequest(disableAutoRefresh); @@ -624,12 +594,11 @@ void assertRealtimeGetWorks(final String typeName) throws IOException { Map hit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0); String docId = (String) hit.get("_id"); - Request updateRequest = new Request("POST", "/" + index + "/" + typeName + "/" + docId + "/_update"); - updateRequest.setOptions(expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)); + Request updateRequest = new Request("POST", "/" + index + "/_update/" + docId); updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}"); client().performRequest(updateRequest); - Request getRequest = new Request("GET", "/" + index + 
"/" + typeName + "/" + docId); + Request getRequest = new Request("GET", "/" + index + "/" + type + "/" + docId); Map getRsp = entityAsMap(client().performRequest(getRequest)); Map source = (Map) getRsp.get("_source"); assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo")); @@ -695,9 +664,6 @@ public void testEmptyShard() throws IOException { // before timing out .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1"); - } createIndex(index, settings.build()); } ensureGreen(index); @@ -717,14 +683,13 @@ public void testRecovery() throws Exception { * an index without a translog so we randomize whether * or not we have one. */ shouldHaveTranslog = randomBoolean(); - Settings.Builder settings = Settings.builder(); if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } - createIndex(index, settings.build()); - - indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); + final String mappings = randomBoolean() ? 
"\"_source\": { \"enabled\": false}" : null; + createIndex(index, settings.build(), mappings); + indexRandomDocuments(count, true, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); // make sure all recoveries are done ensureGreen(index); @@ -735,28 +700,26 @@ public void testRecovery() throws Exception { flushRequest.addParameter("wait_if_ongoing", "true"); assertOK(client().performRequest(flushRequest)); - if (randomBoolean()) { - syncedFlush(index, randomBoolean()); - } if (shouldHaveTranslog) { // Update a few documents so we are sure to have a translog indexRandomDocuments( - count / 10, - false, // flushing here would invalidate the whole thing - false, - i -> jsonBuilder().startObject().field("field", "value").endObject() + count / 10, + false, // flushing here would invalidate the whole thing + false, + true, + i -> jsonBuilder().startObject().field("field", "value").endObject() ); } - saveInfoDocument("should_have_translog", Boolean.toString(shouldHaveTranslog)); + saveInfoDocument(index + "_should_have_translog", Boolean.toString(shouldHaveTranslog)); } else { count = countOfIndexedRandomDocuments(); - shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument("should_have_translog")); + shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument(index + "_should_have_translog")); } // Count the documents in the index to make sure we have as many as we put there Request countRequest = new Request("GET", "/" + index + "/_search"); countRequest.addParameter("size", "0"); - refresh(); + refreshAllIndices(); Map countResponse = entityAsMap(client().performRequest(countRequest)); assertTotalHits(count, countResponse); @@ -789,6 +752,7 @@ public void testRecovery() throws Exception { String currentLuceneVersion = Version.CURRENT.luceneVersion.toString(); String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString(); + String minCompatibleBWCVersion = 
Version.CURRENT.minimumCompatibilityVersion().luceneVersion.toString(); if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) { int numCurrentVersion = 0; int numBwcVersion = 0; @@ -807,6 +771,10 @@ public void testRecovery() throws Exception { numCurrentVersion++; } else if (bwcLuceneVersion.equals(version)) { numBwcVersion++; + } else if (minCompatibleBWCVersion.equals(version) && minCompatibleBWCVersion.equals(bwcLuceneVersion) == false) { + // Our upgrade path from 7.non-last always goes through 7.last, which depending on timing can create 7.last + // index segment. We ignore those. + continue; } else { fail("expected version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line); } @@ -837,7 +805,7 @@ public void testSnapshotRestore() throws IOException { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); } createIndex(index, settings.build()); - indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); + indexRandomDocuments(count, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject()); } else { count = countOfIndexedRandomDocuments(); } @@ -865,9 +833,6 @@ public void testSnapshotRestore() throws IOException { } templateBuilder.endObject(); templateBuilder.startObject("mappings"); { - if (isRunningAgainstAncientCluster()) { - templateBuilder.startObject(type); - } { templateBuilder.startObject("_source"); { @@ -875,9 +840,6 @@ public void testSnapshotRestore() throws IOException { } templateBuilder.endObject(); } - if (isRunningAgainstAncientCluster()) { - templateBuilder.endObject(); - } } templateBuilder.endObject(); templateBuilder.startObject("aliases"); { @@ -896,7 +858,6 @@ public void testSnapshotRestore() throws IOException { templateBuilder.endObject().endObject(); Request createTemplateRequest = new Request("PUT", "/_template/test_template"); 
createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder)); - createTemplateRequest.setOptions(allowTypesRemovalWarnings()); client().performRequest(createTemplateRequest); @@ -987,10 +948,10 @@ public void testSoftDeletes() throws Exception { int numDocs = between(10, 100); for (int i = 0; i < numDocs; i++) { String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject()); - Request request = new Request("POST", "/" + index + "/" + type + "/" + i); + Request request = new Request("POST", "/" + index + "/_doc/" + i); request.setJsonEntity(doc); client().performRequest(request); - refresh(); + refreshAllIndices(); } client().performRequest(new Request("POST", "/" + index + "/_flush")); int liveDocs = numDocs; @@ -998,19 +959,19 @@ public void testSoftDeletes() throws Exception { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject()); - Request request = new Request("POST", "/" + index + "/" + type + "/" + i); + Request request = new Request("POST", "/" + index + "/_doc/" + i); request.setJsonEntity(doc); client().performRequest(request); } else if (randomBoolean()) { - client().performRequest(new Request("DELETE", "/" + index + "/" + type + "/" + i)); + client().performRequest(new Request("DELETE", "/" + index + "/_doc/" + i)); liveDocs--; } } - refresh(); + refreshAllIndices(); assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); - saveInfoDocument("doc_count", Integer.toString(liveDocs)); + saveInfoDocument(index + "_doc_count", Integer.toString(liveDocs)); } else { - int liveDocs = Integer.parseInt(loadInfoDocument("doc_count")); + int liveDocs = Integer.parseInt(loadInfoDocument(index + "_doc_count")); assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); } } @@ -1145,10 +1106,9 @@ 
private void checkSnapshot(final String snapshotName, final int count, final Ver bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/" + type + "/_bulk"); + Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/_bulk"); writeToRestoredRequest.addParameter("refresh", "true"); writeToRestoredRequest.setJsonEntity(bulk.toString()); - writeToRestoredRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); assertThat(EntityUtils.toString(client().performRequest(writeToRestoredRequest).getEntity()), containsString("\"errors\":false")); // And count to make sure the add worked @@ -1156,7 +1116,7 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search"); countAfterWriteRequest.addParameter("size", "0"); Map countAfterResponse = entityAsMap(client().performRequest(countRequest)); - assertTotalHits(count+extras, countAfterResponse); + assertTotalHits(count + extras, countAfterResponse); // Clean up the index for the next iteration client().performRequest(new Request("DELETE", "/restored_*")); @@ -1166,24 +1126,17 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver clusterSettingsRequest.addParameter("flat_settings", "true"); Map clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest)); @SuppressWarnings("unchecked") final Map persistentSettings = - (Map)clusterSettingsResponse.get("persistent"); + (Map) clusterSettingsResponse.get("persistent"); assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion().toString())); // Check that the template was restored successfully Request getTemplateRequest = new Request("GET", "/_template/test_template"); - 
getTemplateRequest.setOptions(allowTypesRemovalWarnings()); Map getTemplateResponse = entityAsMap(client().performRequest(getTemplateRequest)); Map expectedTemplate = new HashMap<>(); expectedTemplate.put("index_patterns", singletonList("evil_*")); expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1"))); - // We don't have the type in the response starting with 7.0, but we won't have it on old cluster after upgrade - // either so look at the response to figure out the correct assertions - if (isTypeInTemplateResponse(getTemplateResponse)) { - expectedTemplate.put("mappings", singletonMap(type, singletonMap("_source", singletonMap("enabled", true)))); - } else { - expectedTemplate.put("mappings", singletonMap("_source", singletonMap("enabled", true))); - } + expectedTemplate.put("mappings", singletonMap("_source", singletonMap("enabled", true))); expectedTemplate.put("order", 0); Map aliases = new HashMap<>(); @@ -1199,32 +1152,23 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver } } - @SuppressWarnings("unchecked") - private boolean isTypeInTemplateResponse(Map getTemplateResponse) { - return ( (Map) ( - (Map) getTemplateResponse.getOrDefault("test_template", emptyMap()) - ).get("mappings")).get("_source") == null; - } - // TODO tests for upgrades after shrink. We've had trouble with shrink in the past. 
private void indexRandomDocuments( - final int count, - final boolean flushAllowed, - final boolean saveInfo, - final CheckedFunction docSupplier) - throws IOException { + final int count, + final boolean flushAllowed, + final boolean saveInfo, + final boolean specifyId, + final CheckedFunction docSupplier + ) throws IOException { logger.info("Indexing {} random documents", count); for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); - Request createDocument = new Request("POST", "/" + index + "/" + type + "/" + i); - if (isRunningAgainstAncientCluster() == false) { - createDocument.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); - } + Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : "")); createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i))); client().performRequest(createDocument); if (rarely()) { - refresh(); + refreshAllIndices(); } if (flushAllowed && rarely()) { logger.debug("Flushing [{}]", index); @@ -1232,7 +1176,7 @@ private void indexRandomDocuments( } } if (saveInfo) { - saveInfoDocument("count", Integer.toString(count)); + saveInfoDocument(index + "_count", Integer.toString(count)); } } @@ -1243,22 +1187,22 @@ private void indexDocument(String id) throws IOException { } private int countOfIndexedRandomDocuments() throws IOException { - return Integer.parseInt(loadInfoDocument("count")); + return Integer.parseInt(loadInfoDocument(index + "_count")); } - private void saveInfoDocument(String type, String value) throws IOException { + private void saveInfoDocument(String id, String value) throws IOException { XContentBuilder infoDoc = JsonXContent.contentBuilder().startObject(); infoDoc.field("value", value); infoDoc.endObject(); // Only create the first version so we know how many documents are created when the index is first created - Request request = new Request("PUT", "/info/" + this.type + "/" + index + "_" + type); + Request request = new 
Request("PUT", "/info/" + type + "/" + id); request.addParameter("op_type", "create"); request.setJsonEntity(Strings.toString(infoDoc)); client().performRequest(request); } - private String loadInfoDocument(String type) throws IOException { - Request request = new Request("GET", "/info/" + this.type + "/" + index + "_" + type); + private String loadInfoDocument(String id) throws IOException { + Request request = new Request("GET", "/info/_doc/" + id); request.addParameter("filter_path", "_source"); String doc = toStr(client().performRequest(request)); Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc); @@ -1266,10 +1210,6 @@ private String loadInfoDocument(String type) throws IOException { return m.group(1); } - private Object randomLenientBoolean() { - return randomFrom(new Object[] {"off", "no", "0", 0, "false", false, "on", "yes", "1", 1, "true", true}); - } - private void refresh() throws IOException { logger.debug("Refreshing [{}]", index); client().performRequest(new Request("POST", "/" + index + "/_refresh")); @@ -1392,45 +1332,6 @@ public void testTurnOffTranslogRetentionAfterUpgraded() throws Exception { } } - public void testRecoveryWithTranslogRetentionDisabled() throws Exception { - if (isRunningAgainstOldCluster()) { - final Settings.Builder settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1); - if (minimumNodeVersion().before(Version.V_2_0_0)) { - settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1"); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), "1kb"); - } - createIndex(index, settings.build()); - ensureGreen(index); - int numDocs = randomIntBetween(0, 100); - for (int i = 0; i < numDocs; i++) { - indexDocument(Integer.toString(i)); - if (rarely()) { - 
flush(index, randomBoolean()); - } - } - client().performRequest(new Request("POST", "/" + index + "/_refresh")); - if (randomBoolean()) { - ensurePeerRecoveryRetentionLeasesRenewedAndSynced(index); - } - if (randomBoolean()) { - flush(index, randomBoolean()); - } else if (randomBoolean()) { - syncedFlush(index, randomBoolean()); - } - saveInfoDocument("doc_count", Integer.toString(numDocs)); - } - ensureGreen(index); - final int numDocs = Integer.parseInt(loadInfoDocument("doc_count")); - assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); - } - public void testResize() throws Exception { int numDocs; if (isRunningAgainstOldCluster()) { @@ -1608,7 +1509,7 @@ public void testEnableSoftDeletesOnRestore() throws Exception { createIndex(index, settings.build()); ensureGreen(index); int numDocs = randomIntBetween(0, 100); - indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); + indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject()); // create repo XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject(); { @@ -1662,7 +1563,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { createIndex(index, settings.build()); ensureGreen(index); int numDocs = randomIntBetween(0, 100); - indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); + indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject()); // create repo XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject(); { diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java index a0c8e8b9a03ac..b133a6462a525 100644 --- 
a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java @@ -46,8 +46,6 @@ import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.document.RestGetAction; -import org.opensearch.rest.action.document.RestIndexAction; import org.opensearch.test.rest.OpenSearchRestTestCase; import org.opensearch.test.rest.yaml.ObjectPath; @@ -365,7 +363,6 @@ private void assertCount(final String index, final String preference, final int private void assertVersion(final String index, final int docId, final String preference, final int expectedVersion) throws IOException { Request request = new Request("GET", index + "/_doc/" + docId); request.addParameter("preference", preference); - request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE)); final Response response = client().performRequest(request); assertOK(response); diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml index 35d9c02e7e362..f8a31c5ec9214 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml @@ -96,10 +96,6 @@ --- "skip_unavailable is returned as part of _remote/info response": - - skip: - version: " - 6.0.99" - reason: "skip_unavailable is only returned from 6.1.0 on" - - do: cluster.get_settings: include_defaults: true diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml index 9d94e7d5abb3f..cc75ce692e6bf 100644 --- 
a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/60_tophits.yml @@ -24,7 +24,7 @@ teardown: bulk: refresh: true body: - - '{"index": {"_index": "single_doc_index", "_type": "test_type"}}' + - '{"index": {"_index": "single_doc_index"}}' - '{"f1": "local_cluster", "sort_field": 0}' - do: search: diff --git a/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java b/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java index 6bd7f07320350..d18c0d8d7cca1 100644 --- a/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java +++ b/qa/os/src/test/java/org/opensearch/packaging/util/Cleanup.java @@ -75,16 +75,14 @@ public static void cleanEverything() throws Exception { sh.runIgnoreExitCode("ps aux | grep -i 'org.opensearch.bootstrap.OpenSearch' | awk {'print $2'} | xargs kill -9"); }); - Platforms.onWindows( - () -> { - // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here - sh.runIgnoreExitCode( - "Get-WmiObject Win32_Process | " - + "Where-Object { $_.CommandLine -Match 'org.opensearch.bootstrap.OpenSearch' } | " - + "ForEach-Object { $_.Terminate() }" - ); - } - ); + Platforms.onWindows(() -> { + // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here + sh.runIgnoreExitCode( + "Get-WmiObject Win32_Process | " + + "Where-Object { $_.CommandLine -Match 'org.opensearch.bootstrap.OpenSearch' } | " + + "ForEach-Object { $_.Terminate() }" + ); + }); Platforms.onLinux(Cleanup::purgePackagesLinux); diff --git a/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java b/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java index dcc6829eb4143..d92feec21daaf 100644 --- a/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java +++ b/qa/os/src/test/java/org/opensearch/packaging/util/ServerUtils.java 
@@ -198,12 +198,12 @@ public static void waitForOpenSearch(String status, String index, Installation i public static void runOpenSearchTests() throws Exception { makeRequest( - Request.Post("http://localhost:9200/library/book/1?refresh=true&pretty") + Request.Post("http://localhost:9200/library/_doc/1?refresh=true&pretty") .bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON) ); makeRequest( - Request.Post("http://localhost:9200/library/book/2?refresh=true&pretty") + Request.Post("http://localhost:9200/library/_doc/2?refresh=true&pretty") .bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON) ); diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index 79745b1cc2f95..f34e5f7bc121a 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -205,12 +205,11 @@ public void testAutoIdWithOpTypeCreate() throws IOException { private void bulk(String index, String valueSuffix, int count) throws IOException { StringBuilder b = new StringBuilder(); for (int i = 0; i < count; i++) { - b.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"_doc\"}}\n"); + b.append("{\"index\": {\"_index\": \"").append(index).append("\"}}\n"); b.append("{\"f1\": \"v").append(i).append(valueSuffix).append("\", \"f2\": ").append(i).append("}\n"); } Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); bulk.setJsonEntity(b.toString()); client().performRequest(bulk); } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java index 41af456d3c44a..687fd1743c3d3 100644 --- 
a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java @@ -48,9 +48,6 @@ import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.IndexSettings; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.document.RestGetAction; -import org.opensearch.rest.action.document.RestIndexAction; -import org.opensearch.rest.action.document.RestUpdateAction; import org.opensearch.test.rest.yaml.ObjectPath; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -67,7 +64,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Predicate; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLength; import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.opensearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY; @@ -124,7 +121,7 @@ private int indexDocs(String index, final int idStart, final int numDocs) throws for (int i = 0; i < numDocs; i++) { final int id = idStart + i; Request indexDoc = new Request("PUT", index + "/_doc/" + id); - indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}"); + indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiLettersOfLength(2) + "\"}"); client().performRequest(indexDoc); } return numDocs; @@ -659,7 +656,6 @@ public void testUpdateDoc() throws Exception { for (int i = 0; i < times; i++) { long value = randomNonNegativeLong(); Request update = new Request("POST", index + "/_update/" + docId); - update.setOptions(expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)); update.setJsonEntity("{\"doc\": 
{\"updated_field\": " + value + "}}"); client().performRequest(update); updates.put(docId, value); @@ -668,7 +664,6 @@ public void testUpdateDoc() throws Exception { client().performRequest(new Request("POST", index + "/_refresh")); for (int docId : updates.keySet()) { Request get = new Request("GET", index + "/_doc/" + docId); - get.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE)); Map doc = entityAsMap(client().performRequest(get)); assertThat(XContentMapValues.extractValue("_source.updated_field", doc), equalTo(updates.get(docId))); } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 1956cd56e6850..f83c098e05741 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -78,5 +78,4 @@ - do: indices.get: index: queries - include_type_name: false - match: { queries.mappings.properties.id.type: "keyword" } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml index 83df474d70d89..89992eeba616f 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml @@ -80,9 +80,6 @@ --- "Create index with java style index in 6": - - skip: - version: " - 6.7.99, 7.0.0 -" - reason: java.time patterns are allowed since 6.8 - do: indices.create: index: java_for_range diff --git a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java index e3503e2f065b1..a13d406f7b133 100644 --- 
a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java +++ b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java @@ -226,7 +226,7 @@ private static void indexTestData() { // Make sure we have a few segments BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int j = 0; j < 20; j++) { - bulkRequestBuilder.add(client().prepareIndex("test", "_doc", Integer.toString(i * 5 + j)).setSource("field", "value")); + bulkRequestBuilder.add(client().prepareIndex("test").setId(Integer.toString(i * 5 + j)).setSource("field", "value")); } assertNoFailures(bulkRequestBuilder.get()); } diff --git a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml index b70ad87387db6..7a0cdcbef0786 100644 --- a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml +++ b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml @@ -54,7 +54,6 @@ "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java index 5b2468b6304b1..83643f3217720 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/IngestDocumentMustacheIT.java @@ -46,7 +46,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase { public void testAccessMetadataViaTemplate() { Map document = new HashMap<>(); 
document.put("foo", "bar"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document); ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo}}", scriptService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar")); @@ -61,7 +61,7 @@ public void testAccessMapMetadataViaTemplate() { innerObject.put("baz", "hello baz"); innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar")); document.put("foo", innerObject); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document); ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", scriptService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar")); @@ -80,7 +80,7 @@ public void testAccessListMetadataViaTemplate() { list.add(value); list.add(null); document.put("list2", list); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, document); ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", scriptService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}")); } @@ -90,7 +90,7 @@ public void testAccessIngestMetadataViaTemplate() { Map ingestMap = new HashMap<>(); ingestMap.put("timestamp", "bogus_timestamp"); document.put("_ingest", ingestMap); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, 
document); ingestDocument.setFieldValue(compile("ingest_timestamp"), ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", scriptService)); assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class), diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java index 83641cca6156a..2804c73032f1b 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/opensearch/ingest/ValueSourceMustacheIT.java @@ -77,7 +77,7 @@ public void testValueSourceWithTemplates() { } public void testAccessSourceViaTemplate() { - IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument("marvel", "id", null, null, null, new HashMap<>()); assertThat(ingestDocument.hasField("marvel"), is(false)); ingestDocument.setFieldValue(compile("{{_index}}"), ValueSource.wrap("{{_index}}", scriptService)); assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel")); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml index 0235a346ad3d7..e6a2a3d52e116 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml @@ -295,7 +295,6 @@ "docs": [ { "_index": "index", - "_type": "type", "_id": "id", "_source": { "foo": "bar" diff 
--git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml index 6d567d64532c7..27f7f804ead1c 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml @@ -1,10 +1,5 @@ --- "Test with date processor": - - skip: - version: " - 6.9.99" - reason: pre-7.0.0 requires the 8 prefix for Java time formats, so would treat the format in this test as a Joda time format - features: "warnings" - - do: ingest.put_pipeline: id: "_id" diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml index 18929c47a4027..5eedae174eaa9 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml @@ -18,7 +18,6 @@ - do: index: index: twitter - type: _doc id: 1 body: { "user": "foobar" } - do: diff --git a/qa/translog-policy/build.gradle b/qa/translog-policy/build.gradle deleted file mode 100644 index 5ef7774045e16..0000000000000 --- a/qa/translog-policy/build.gradle +++ /dev/null @@ -1,117 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.opensearch.gradle.Version -import org.opensearch.gradle.info.BuildParams -import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask - -apply plugin: 'opensearch.testclusters' -apply plugin: 'opensearch.standalone-test' -apply from : "$rootDir/gradle/bwc-test.gradle" - -for (Version bwcVersion : BuildParams.bwcVersions.indexCompatible) { - if (bwcVersion.before('6.3.0')) { - // explicitly running restart on the current node does not work in step 2 - // below when plugins are installed, which is the case for some plugins - // prior to 6.3.0 - continue - } - String baseName = "v${bwcVersion}" - - testClusters { - "${baseName}" { - versions = [bwcVersion.toString(), project.version] - numberOfNodes = 2 - setting 'http.content_type.required', 'true' - } - } - - tasks.register("${baseName}#Step1OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - mustRunAfter(precommit) - systemProperty 'tests.test_step', 'step1' - systemProperty 'tests.is_old_cluster', 'true' - } - - tasks.register("${baseName}#Step2OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn 
"${baseName}#Step1OldClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step2' - systemProperty 'tests.is_old_cluster', 'true' - } - - tasks.register("${baseName}#Step3NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step2OldClusterTest" - doFirst { - testClusters."${baseName}".goToNextVersion() - } - systemProperty 'tests.test_step', 'step3' - systemProperty 'tests.is_old_cluster', 'false' - } - - tasks.register("${baseName}#Step4NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step3NewClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step4' - systemProperty 'tests.is_old_cluster', 'false' - } - - String oldVersion = bwcVersion.toString().minus("-SNAPSHOT") - tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach { - it.systemProperty 'tests.old_cluster_version', oldVersion - it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") - it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}") - } - - tasks.register(bwcTaskName(bwcVersion)) { - dependsOn tasks.named("${baseName}#Step4NewClusterTest") - } -} - -configurations { - testArtifacts.extendsFrom testRuntime -} - -task testJar(type: Jar) { - archiveAppendix = 'test' - from sourceSets.test.output -} - -artifacts { - testArtifacts testJar -} diff --git a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java index 5ae9944429d21..0dc62b160ff3f 100644 --- a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java +++ b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java 
@@ -98,6 +98,7 @@ public void setType() { type = "_doc"; } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/pull/2225") public void testEmptyIndex() throws Exception { if (TEST_STEP == TestStep.STEP1_OLD_CLUSTER) { final Settings.Builder settings = Settings.builder() @@ -113,6 +114,7 @@ public void testEmptyIndex() throws Exception { assertTotalHits(0, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/pull/2225") public void testRecoverReplica() throws Exception { int numDocs = 100; if (TEST_STEP == TestStep.STEP1_OLD_CLUSTER) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json index 9f6461b16d3eb..8cdb3db7c12cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json @@ -27,27 +27,6 @@ "description":"A comma-separated list of indices to restrict the results" } } - }, - { - "path":"/{index}/{type}/_count", - "methods":[ - "POST", - "GET" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of indices to restrict the results" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of types to restrict the results" - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 9f651b17ea1b2..4c32974583aac 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -18,25 +18,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the 
operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_delete_by_query", - "methods": ["POST"], - "parts": { - "index": { - "required": true, - "type": "list", - "description": "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json index 09042376a256b..fd221b474a070 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json @@ -22,31 +22,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}/{id}", - "methods":[ - "HEAD" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The document ID" - }, - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document (use `_all` to fetch the first document matching the ID across all types)", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json index 0c8d62d6d1d34..2ce77f17aff10 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json @@ -22,31 +22,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}/{id}", - "methods":[ - "GET" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The document ID" 
- }, - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document (use `_all` to fetch the first document matching the ID across all types)", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json index e5336059d3924..ad79678388590 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json @@ -22,31 +22,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}/{id}/_source", - "methods":[ - "GET" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The document ID" - }, - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document; deprecated and optional starting with 7.0", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index 37f3cc9f9f82b..b4865403331b0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -35,53 +35,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}", - "methods":[ - "POST" - ], - "parts":{ - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - 
}, - { - "path":"/{index}/{type}/{id}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"Document ID" - }, - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index 2b9e8617a661c..922183d628ac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be expected in the body of the mappings." - }, "wait_for_active_shards":{ "type":"string", "description":"Set the number of active shards to wait for before the operation returns." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index f78b410f5b489..90a1274ecb059 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether to add the type name to the response (default: false)" - }, "local":{ "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 15cc48a582cc4..0e71b6d395777 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -34,60 +34,10 @@ "description":"A comma-separated list of fields" } } - }, - { - "path":"/_mapping/{type}/field/{fields}", - "methods":[ - "GET" - ], - "parts":{ - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - }, - "fields":{ - "type":"list", - "description":"A comma-separated list of fields" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}/field/{fields}", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names" - }, - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - }, - "fields":{ - "type":"list", - "description":"A comma-separated list of fields" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" 
- } } ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "include_defaults":{ "type":"boolean", "description":"Whether the default mapping values should be returned as well" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index 1d9e795c6ed5d..24fd668069697 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -24,52 +24,10 @@ "description":"A comma-separated list of index names" } } - }, - { - "path":"/_mapping/{type}", - "methods":[ - "GET" - ], - "parts":{ - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names" - }, - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether to add the type name to the response (default: false)" - }, "ignore_unavailable":{ "type":"boolean", "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json index 9e07ae663ff8f..337016763ad0a 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json @@ -28,10 +28,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "flat_settings":{ "type":"boolean", "description":"Return settings in flat format (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index f23380ac2f1ac..451cbccd8d329 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -19,155 +19,10 @@ "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." } } - }, - { - "path":"/{index}/{type}/_mapping", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." 
- }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/{type}/_mappings", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mappings/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/_mappings/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mappings", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." 
- } - }, - "deprecated":{ - "version":"7.0.0", - "description":"The plural mappings is accepted but only /_mapping is documented" - } - }, - { - "path":"/_mapping/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be expected in the body of the mappings." - }, "timeout":{ "type":"time", "description":"Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json index 701a722d89eb8..75a328af929ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json @@ -23,10 +23,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "order":{ "type":"number", "description":"The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 4ed1f9b490969..fef1f03d1c9a7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -38,10 +38,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be included in the body of the mappings." 
- }, "timeout":{ "type":"time", "description":"Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json index f1d35aee7d62f..e0b58139ed684 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mget.json @@ -26,28 +26,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}/_mget", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, @@ -86,7 +64,7 @@ } }, "body":{ - "description":"Document identifiers; can be either `docs` (containing full document information) or `ids` (when index and type is provided in the URL.", + "description":"Document identifiers; can be either `docs` (containing full document information) or `ids` (when index is provided in the URL).", "required":true } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json index e3e6ef57e42c8..3a3a6ebe1bff5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json @@ -15,34 +15,16 @@ ] }, { - "path":"/{index}/_msearch", - "methods":[ + "path": "/{index}/_msearch", + "methods": [ "GET", "POST" ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names to use as default" - } - } - }, - { - "path" : "/{index}/{type}/_msearch", - "methods": ["GET", "POST"], "parts": { "index": { - "type" : "list", - "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - 
"description" : "A comma-separated list of document types to use as default" + "type": "list", + "description": "A comma-separated list of index names to use as default" } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" } } ] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 8eb300c975932..7ac194f91bf56 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to use as default" } } - }, - { - "path" : "/{index}/{type}/_msearch/template", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to use as default" - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json index 93dee177e8026..d5fc7371e0898 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json @@ -26,25 +26,6 @@ "description":"The index in which the document resides." } } - }, - { - "path" : "/{index}/{type}/_mtermvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides." - }, - "type" : { - "type" : "string", - "description" : "The type of the document." 
- } - }, - "deprecated":{ - "version" : "7.0.0", - - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 7770acc52f978..ac321acf8907b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_search", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json index 00bd09729c908..4230b660523b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json @@ -26,24 +26,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_search/template", - "methods": ["GET", "POST"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated 
list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json index dd7fac97d79a7..b6cb3663c2df2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json @@ -36,50 +36,6 @@ "description":"The index in which the document resides." } } - }, - { - "path" : "/{index}/{type}/{id}/_termvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides.", - "required" : true - }, - "type" : { - "type" : "string", - "description" : "The type of the document.", - "required" : false - }, - "id" : { - "type" : "string", - "description" : "The id of the document, when not specified a doc param should be supplied." 
- } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } - }, - { - "path" : "/{index}/{type}/_termvectors", - "methods" : ["GET", "POST"], - "parts" : { - "index" : { - "type" : "string", - "description" : "The index in which the document resides.", - "required" : true - }, - "type" : { - "type" : "string", - "description" : "The type of the document.", - "required" : false - } - }, - "deprecated": { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 930f1a9700076..71a0c1fc8ad95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -18,25 +18,6 @@ "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path" : "/{index}/{type}/_update_by_query", - "methods": ["POST"], - "parts": { - "index": { - "required": true, - "type": "list", - "description": "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type": "list", - "description": "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "deprecated" : { - "version" : "7.0.0", - "description" : "Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index 59612af74616c..8c8c6d50abf41 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -1,10 
+1,5 @@ --- "Array of objects": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: bulk: refresh: true @@ -28,11 +23,6 @@ --- "Empty _id": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: bulk: refresh: true @@ -107,12 +97,8 @@ --- "empty action": - - skip: - version: " - 6.99.99" - features: headers - reason: types are required in requests before 7.0.0 - + features: headers - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ headers: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml deleted file mode 100644 index 6bebed7bc1dd0..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml +++ /dev/null @@ -1,120 +0,0 @@ ---- -"Array of objects": - - do: - bulk: - refresh: true - body: - - index: - _index: test_index - _type: test_type - _id: test_id - - f1: v1 - f2: 42 - - index: - _index: test_index - _type: test_type - _id: test_id2 - - f1: v2 - f2: 47 - - - do: - count: - index: test_index - - - match: {count: 2} - ---- -"Empty _id": - - do: - bulk: - refresh: true - body: - - index: - _index: test - _type: type - _id: '' - - f: 1 - - index: - _index: test - _type: type - _id: id - - f: 2 - - index: - _index: test - _type: type - - f: 3 - - match: { errors: true } - - match: { items.0.index.status: 400 } - - match: { items.0.index.error.type: illegal_argument_exception } - - match: { items.0.index.error.reason: if _id is specified it must not be empty } - - match: { items.1.index.result: created } - - match: { items.2.index.result: created } - - - do: - count: - index: test - - - match: { count: 2 } - ---- -"Empty _id with op_type create": - - skip: - version: " - 7.4.99" - reason: "auto id + op type create only supported since 7.5" - - - do: - bulk: - 
refresh: true - body: - - index: - _index: test - _type: type - _id: '' - - f: 1 - - index: - _index: test - _type: type - _id: id - - f: 2 - - index: - _index: test - _type: type - - f: 3 - - create: - _index: test - _type: type - - f: 4 - - index: - _index: test - _type: type - op_type: create - - f: 5 - - match: { errors: true } - - match: { items.0.index.status: 400 } - - match: { items.0.index.error.type: illegal_argument_exception } - - match: { items.0.index.error.reason: if _id is specified it must not be empty } - - match: { items.1.index.result: created } - - match: { items.2.index.result: created } - - match: { items.3.create.result: created } - - match: { items.4.create.result: created } - - - do: - count: - index: test - - - match: { count: 4 } - ---- -"empty action": - - skip: - features: headers - - - do: - catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ - headers: - Content-Type: application/json - bulk: - body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} - {"f1": "v1", "f2": 42} - {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index b23517f6a8f25..3d956dce54289 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -1,9 +1,5 @@ --- "List of strings": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml deleted file mode 100644 index def91f4280722..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml +++ 
/dev/null @@ -1,17 +0,0 @@ ---- -"List of strings": - - do: - bulk: - refresh: true - body: - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}' - - '{"f1": "v1", "f2": 42}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}' - - '{"f1": "v2", "f2": 47}' - - - do: - count: - index: test_index - - - match: {count: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml index 38706d133e44b..8b6467eeed975 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml @@ -1,9 +1,5 @@ --- "One big string": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml deleted file mode 100644 index 1d117253c9b01..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml +++ /dev/null @@ -1,17 +0,0 @@ ---- -"One big string": - - do: - bulk: - refresh: true - body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} - {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} - {"f1": "v2", "f2": 47} - - - do: - count: - index: test_index - - - match: {count: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml index 5e783d60d3d46..e29e84740ee5c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml @@ -1,9 +1,5 @@ --- "Source 
filtering": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: index: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml index 77098779c0c4f..34fc94691c21a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml @@ -1,9 +1,5 @@ --- "refresh=true immediately makes changes are visible in search": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: bulk: refresh: true @@ -20,10 +16,6 @@ --- "refresh=empty string immediately makes changes are visible in search": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: bulk: refresh: "" @@ -41,10 +33,6 @@ --- "refresh=wait_for waits until changes are visible in search": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: bulk: refresh: wait_for diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml deleted file mode 100644 index 6326b9464caa0..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- -"refresh=true immediately makes changes are visible in search": - - do: - bulk: - refresh: true - body: | - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} - {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} - {"f1": "v2", "f2": 47} - - - do: - count: - index: bulk_50_refresh_1 - - match: {count: 2} - ---- -"refresh=empty string immediately makes changes are visible in search": - - do: - bulk: - refresh: "" - 
body: | - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} - {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} - {"f1": "v2", "f2": 47} - - - do: - count: - index: bulk_50_refresh_2 - - match: {count: 2} - - ---- -"refresh=wait_for waits until changes are visible in search": - - do: - bulk: - refresh: wait_for - body: | - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} - {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} - {"f1": "v2", "f2": 47} - - - do: - count: - index: bulk_50_refresh_3 - - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml index 1401fcc086208..8c8a840eb3f47 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/60_deprecated.yml @@ -1,12 +1,6 @@ --- "Deprecated parameters should fail in Bulk query": - - - skip: - version: " - 6.99.99" - reason: some parameters are removed starting from 7.0, their equivalents without underscore are used instead - features: "warnings" - - do: catch: bad_request bulk: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml deleted file mode 100644 index cad0891b21e52..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml +++ /dev/null @@ -1,35 +0,0 @@ ---- -"bulk without types on an index that has types": - - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: 
true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - do: - bulk: - refresh: true - body: - - index: - _index: index - _id: 0 - - foo: bar - - index: - _index: index - _id: 1 - - foo: bar - - - do: - count: - index: index - - - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml index 902621cfba578..87d3d237d42cb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/80_cas.yml @@ -1,10 +1,5 @@ --- "Compare And Swap Sequence Numbers": - - - skip: - version: " - 6.99.99" - reason: typeless API are add in 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml index aa6c96202eaf4..db74e51cc2f91 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml @@ -94,10 +94,6 @@ --- "cluster health basic test, one index with wait for no initializing shards": - - skip: - version: " - 6.1.99" - reason: "wait_for_no_initializing_shards is introduced in 6.2.0" - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml index a40f4803ab0b8..a0432fa7aa558 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml @@ -75,10 +75,6 @@ --- "get cluster stats returns discovery types": - - skip: - version: " - 6.99.99" - reason: "discovery types are added for v7.0.0" - - do: cluster.stats: {} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml deleted file mode 100644 index 09d96670f688e..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/count/11_basic_with_types.yml +++ /dev/null @@ -1,61 +0,0 @@ -setup: - - do: - indices.create: - index: test - - do: - index: - index: test - id: 1 - body: { foo: bar } - - - do: - indices.refresh: - index: [test] - ---- -"count with body": - - do: - count: - index: test - body: - query: - match: - foo: bar - - - match: {count : 1} - - - do: - count: - index: test - body: - query: - match: - foo: test - - - match: {count : 0} - ---- -"count with empty body": -# empty body should default to match_all query - - do: - count: - index: test - body: { } - - - match: {count : 1} - - - do: - count: - index: test - - - match: {count : 1} - ---- -"count body without query element": - - do: - catch: bad_request - count: - index: test - body: - match: - foo: bar diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml deleted file mode 100644 index 86d0d4b59e06b..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/36_external_version_with_types.yml +++ /dev/null @@ -1,28 +0,0 @@ ---- -"External version": - - - do: - catch: bad_request - create: - index: test - id: 1 - body: { foo: bar } - version_type: external - version: 0 - - - match: { status: 400 } - - match: { error.type: action_request_validation_exception } - - match: { error.reason: "Validation Failed: 1: create operations only support internal versioning. 
use index instead;" } - - - do: - catch: bad_request - create: - index: test - id: 2 - body: { foo: bar } - version_type: external - version: 5 - - - match: { status: 400 } - - match: { error.type: action_request_validation_exception } - - match: { error.reason: "Validation Failed: 1: create operations only support internal versioning. use index instead;" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml index e6d2413f16788..6b4e7ccc48ca6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: indices.create: index: test_1 @@ -16,9 +13,6 @@ setup: --- "Indexing a doc with No. nested objects less or equal to index.mapping.nested_objects.limit should succeed": - - skip: - version: " - 6.99.99" - reason: index.mapping.nested_objects setting has been added in 7.0.0 - do: create: index: test_1 @@ -29,9 +23,6 @@ setup: --- "Indexing a doc with No. nested objects more than index.mapping.nested_objects.limit should fail": - - skip: - version: " - 6.99.99" - reason: index.mapping.nested_objects setting has been added in 7.0.0 - do: catch: /The number of nested documents has exceeded the allowed limit of \[2\]. 
This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./ create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml index 3fc10bc8db12d..6a2f852b221c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml @@ -29,7 +29,6 @@ id: 1 - match: { _index: foobar } - - match: { _type: _doc } - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml index 1ab90e3efa83f..84f5fa67590e6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml @@ -1,9 +1,5 @@ --- "Basic": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: exists: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml index 6fabdd59820cf..24e296130e405 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml @@ -1,9 +1,5 @@ --- "Client-side default type": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml index bfe8da8d91519..6933d28a8492e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: test_1 @@ -34,7 +30,6 @@ setup: - is_true: matched - match: { explanation.value: 1 } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: id_1 } --- @@ -51,7 +46,6 @@ setup: - is_true: matched - match: { explanation.value: 1 } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: id_1 } --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml index ad596f980807b..3d2f42d31f4df 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml @@ -1,9 +1,5 @@ --- "Source filtering": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 @@ -16,7 +12,6 @@ - do: explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - is_false: get._source diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml index ac34d4c2495f2..5c9b391ded6b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml @@ -1,9 +1,5 @@ --- "explain with query_string parameters": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml index d125efa73011c..45e9a969c5982 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml @@ -149,13 +149,8 @@ setup: - is_false: fields.geo.keyword.on_aggregatable_indices --- "Get date_nanos field caps": - - skip: - version: " - 6.99.99" - reason: date_nanos field mapping type has been introcued in 7.0 - - do: indices.create: - include_type_name: false index: test_nanos body: mappings: @@ -204,10 +199,6 @@ setup: - is_false: fields.object\.nested2.keyword.non_searchable_indices --- "Get object and nested field caps": - - skip: - version: " - 6.99.99" - reason: object and nested fields are returned since 7.0 - - do: field_caps: index: 'test1,test2,test3' diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml index 9183c70c29bce..822e96e405583 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml @@ -1,10 +1,5 @@ --- "Basic": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 @@ -17,6 +12,5 @@ id: 中文 - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: 中文 } - match: { _source: { foo: "Hello: 中文" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml index 67065270665cf..921397b238f51 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml @@ -1,9 +1,5 @@ --- "Default values": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 
7.0.0 - do: index: index: test_1 @@ -16,7 +12,6 @@ id: 1 - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: '1' } - match: { _source: { foo: "bar" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml index ab27842e4516e..23c7e5cbc90a6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -1,9 +1,5 @@ --- "Stored fields": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: indices.create: index: test_1 @@ -29,7 +25,6 @@ stored_fields: foo - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: '1' } - match: { fields.foo: [bar] } - is_false: _source diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml index 38130cee59810..d79a3bd300da8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml @@ -18,7 +18,6 @@ id: 1 - match: {_index: "test_1"} - - match: { _type: _doc } - match: {_id: "1"} - match: {_version: 1} - match: {found: true} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml index f4a5ba39be3b8..8ef3ad708fc18 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml @@ -1,9 +1,5 @@ --- "Source filtering": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: indices.create: index: test_1 @@ -23,7 +19,6 @@ get: { index: test_1, id: 1, 
_source: false } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - is_false: _source @@ -62,7 +57,6 @@ _source: true - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - match: { fields.count: [1] } - match: { _source.include.field1: v1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml index d7d8edfc65dcb..30efd759c1a65 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml @@ -1,9 +1,5 @@ --- "Missing document with catch": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: catch: missing get: @@ -12,10 +8,6 @@ --- "Missing document with ignore": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: get: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml index 6f81c430c883a..887e31f33d45e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml @@ -1,10 +1,5 @@ --- "Basic": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml index 57c11a1ca10e2..73fce7ce09bbf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml @@ -1,11 +1,5 @@ --- "Default values": - - - - skip: - version: " - 6.99.99" - 
reason: types are required in requests before 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml index 2665458cea95d..0836979fbf83a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml @@ -1,11 +1,5 @@ --- "Source filtering": - - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml index a129dcab80d9a..97eb9be1547ba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml @@ -12,7 +12,6 @@ body: { foo: bar } - match: { _index: test-weird-index-中文 } - - match: { _type: _doc } - match: { _id: "1"} - match: { _version: 1} @@ -22,7 +21,6 @@ id: 1 - match: { _index: test-weird-index-中文 } - - match: { _type: _doc } - match: { _id: "1"} - match: { _version: 1} - match: { _source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml index f8a50415a95ef..478a731828738 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml @@ -1,9 +1,5 @@ --- "Index result field": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: index: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml index 073a4704b4ef8..54f203e3621bc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml @@ -12,7 +12,6 @@ - is_true: _id - match: { _index: test_1 } - - match: { _type: _doc } - match: { _version: 1 } - set: { _id: id } @@ -22,7 +21,6 @@ id: '$id' - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: $id } - match: { _version: 1 } - match: { _source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml index 550582e9816eb..27534131782a5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_cas.yml @@ -1,10 +1,5 @@ --- "Compare And Swap Sequence Numbers": - - - skip: - version: " - 6.99.99" - reason: typesless api was introduces in 7.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml deleted file mode 100644 index f3629fbb7cc18..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/70_mix_typeless_typeful.yml +++ /dev/null @@ -1,102 +0,0 @@ ---- -"Index with typeless API on an index that has types": - - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - index: - index: index - id: 1 - body: { foo: bar } - - - match: { _index: "index" } - - match: { _type: "_doc" } - - match: { _id: "1"} - - match: { _version: 1} - - - do: - get: # not using 
typeless API on purpose - index: index - type: not_doc - id: 1 - - - match: { _index: "index" } - - match: { _type: "not_doc" } # the important bit to check - - match: { _id: "1"} - - match: { _version: 1} - - match: { _source: { foo: bar }} - - - - do: - index: - index: index - body: { foo: bar } - - - match: { _index: "index" } - - match: { _type: "_doc" } - - match: { _version: 1} - - set: { _id: id } - - - do: - get: # using typeful API on purpose - index: index - type: not_doc - id: '$id' - - - match: { _index: "index" } - - match: { _type: "not_doc" } # the important bit to check - - match: { _id: $id} - - match: { _version: 1} - - match: { _source: { foo: bar }} - ---- -"Index call that introduces new field mappings": - - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - do: - index: - index: index - id: 2 - body: { new_field: value } - - - match: { _index: "index" } - - match: { _type: "_doc" } - - match: { _id: "2" } - - match: { _version: 1 } - - - do: - get: # using typeful API on purpose - index: index - type: not_doc - id: 2 - - - match: { _index: "index" } - - match: { _type: "not_doc" } - - match: { _id: "2" } - - match: { _version: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml index 87d3b77aee329..37a14d9abb669 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml @@ -9,9 +9,6 @@ setup: --- "_analyze with No. 
generated tokens less than or equal to index.analyze.max_token_count should succeed": - - skip: - version: " - 6.99.99" - reason: index.analyze.max_token_count setting has been added in 7.0.0 - do: indices.analyze: index: test_1 @@ -25,9 +22,6 @@ setup: --- "_analyze with No. generated tokens more than index.analyze.max_token_count should fail": - - skip: - version: " - 6.99.99" - reason: index.analyze.max_token_count setting has been added in 7.0.0 - do: catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./ indices.analyze: @@ -39,9 +33,6 @@ setup: --- "_analyze with explain with No. generated tokens more than index.analyze.max_token_count should fail": - - skip: - version: " - 6.99.99" - reason: index.analyze.max_token_count setting has been added in 7.0.0 - do: catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. 
This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./ indices.analyze: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml index 099226e41e6d3..94b23fb63adb5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml @@ -11,10 +11,6 @@ --- "clear_cache with fielddata set to true": - - skip: - version: " - 6.2.99" - reason: fielddata was deprecated before 6.3.0 - - do: indices.clear_cache: fielddata: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml index 412d29905ffc2..a4d1841ed7108 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clone/10_basic.yml @@ -66,7 +66,6 @@ setup: id: "1" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "1" } - match: { _source: { foo: "hello world" } } @@ -77,7 +76,6 @@ setup: id: "2" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "2" } - match: { _source: { foo: "hello world 2" } } @@ -88,7 +86,6 @@ setup: id: "3" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "3" } - match: { _source: { foo: "hello world 3" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index 30419f7738bbf..0f8c7a7a68f07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -1,9 +1,5 @@ --- "Create index with 
mappings": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.create: index: test_index @@ -19,10 +15,6 @@ --- "Create index with settings": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.create: index: test_index @@ -38,10 +30,6 @@ --- "Create index": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.create: index: test_index @@ -51,10 +39,6 @@ --- "Create index with wait_for_active_shards set to all": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.create: index: test_index @@ -68,10 +52,6 @@ --- "Create index with aliases": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.create: index: test_index @@ -102,9 +82,6 @@ --- "Create index with write aliases": - - skip: - version: " - 6.99.99" - reason: is_write_index is not implemented in ES <= 6.x - do: indices.create: index: test_index @@ -135,23 +112,3 @@ properties: "": type: keyword - ---- -"Create index with explicit _doc type": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - - do: - catch: bad_request - indices.create: - index: test_index - body: - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml index 0fdb4d08543e4..29b3c1208a7b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml @@ -1,8 +1,5 @@ --- "Flush stats": - - skip: - version: " - 6.2.99" - reason: periodic flush stats is introduced in 6.3.0 - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml deleted file mode 100644 index 413c4bcb8d28c..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml +++ /dev/null @@ -1,78 +0,0 @@ ---- -setup: - - - do: - indices.create: - include_type_name: true - index: test_index - body: - aliases: - test_alias: {} - test_blias: {} - mappings: - type_1: {} - settings: - number_of_shards: 1 - number_of_replicas: 1 - - - do: - indices.create: - index: test_index_2 - body: - settings: - number_of_shards: 1 - number_of_replicas: 2 - aliases: - test_alias: {} - test_blias: {} - - - do: - indices.create: - index: test_index_3 - body: - aliases: - test_alias: {} - test_blias: {} - - - do: - indices.close: - index: test_index_3 - - - do: - cluster.health: - wait_for_status: yellow - ---- -"Test include_type_name": - - skip: - version: " - 6.6.99" - reason: the include_type_name parameter is not supported before 6.7 - - - do: - indices.get: - include_type_name: true - index: test_index - - - is_true: test_index.mappings - - is_true: test_index.mappings.type_1 - - - do: - indices.get: - include_type_name: false - index: test_index - - - is_true: test_index.mappings - - is_false: test_index.mappings.type_1 - ---- -"Test include_type_name dafaults to false": - - 
skip: - version: " - 6.99.99" - reason: the include_type_name parameter default is different on 6.x and 7.0, so only test this on 7.0 clusters - - - do: - indices.get: - index: test_index - - - is_true: test_index.mappings - - is_false: test_index.mappings.type_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml index 08b3009be0e88..389166a03136e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_alias/30_wildcards.yml @@ -26,9 +26,6 @@ setup: --- "Get aliases wildcard and simple exclusion": - - skip: - version: " - 6.99.99" - reason: Exclusions in the alias expression are not handled - do: indices.get_alias: name: test_blias_2,test_alias*,-test_alias_1 @@ -41,9 +38,6 @@ setup: --- "Get aliases and wildcard exclusion": - - skip: - version: " - 6.99.99" - reason: Exclusions in the alias expression are not handled - do: indices.get_alias: name: test_alias_1,test_blias_1,-test_alias* @@ -66,9 +60,6 @@ setup: --- "Non-existent exclusion alias before wildcard returns 404": - - skip: - version: " - 6.99.99" - reason: Exclusions in the alias expression are not handled - do: catch: missing indices.get_alias: @@ -97,9 +88,6 @@ setup: --- "Missing exclusions does not fire 404": - - skip: - version: " - 6.99.99" - reason: Exclusions in the alias expression are not handled - do: indices.get_alias: name: test_alias*,-non-existent,test_blias*,-test @@ -112,9 +100,6 @@ setup: --- "Exclusion of non wildcarded aliases": - - skip: - version: " - 6.99.99" - reason: Exclusions in the alias expression are not handled - do: indices.get_alias: name: test_alias_1,test_blias_2,-test_alias*,-test_blias_2 @@ -123,9 +108,6 @@ setup: --- "Wildcard exclusions does not trigger 404": - - skip: - version: " - 6.99.99" - reason: Exclusions in 
the alias expression are not handled - do: catch: missing indices.get_alias: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml index 84f2a0210fcf4..b132aa6bf03de 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml deleted file mode 100644 index 0a7f5fa3560ba..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,83 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - ---- -"Get field mapping with no index and type": - - - do: - indices.get_field_mapping: - include_type_name: true - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by index only": - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by type & field": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by type & field, with another field that doesn't exist": - - - do: - 
indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: [ text , text1 ] - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - - is_false: test_index.mappings.test_type.text1 - ---- -"Get field mapping with include_defaults": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: text - include_defaults: true - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - - match: {test_index.mappings.test_type.text.mapping.text.analyzer: default} - ---- -"Get field mapping should work without index specifying type and fields": - - - do: - indices.get_field_mapping: - include_type_name: true - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index 1570ded351874..a36f807e63e0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -1,8 +1,9 @@ --- -"Return empty object if field doesn't exist, but type and index do": +"Return empty object if field doesn't exist, but index does": - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 + version: "all" + reason: "AwaitsFix https://github.com/opensearch-project/OpenSearch/issues/2440" + - do: indices.create: index: test_index @@ -17,5 +18,7 @@ indices.get_field_mapping: index: test_index fields: not_existent + ignore: 404 # ignore 404 failures for now + # see: https://github.com/opensearch-project/OpenSearch/issues/2440 - match: { 'test_index.mappings': {}} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml deleted file mode 100644 index 264d187ebd22d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -"Return empty object if field doesn't exist, but type and index do": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: not_existent - - - match: { '': {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml deleted file mode 100644 index 0bf3f1f7823ee..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -"Raise 404 when type doesn't exist": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_field_mapping: - include_type_name: true - index: test_index - type: not_test_type - fields: text diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml index 7db61d122e7ce..2c9ff58b445df 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml deleted file mode 100644 index 68c183e9b292e..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - t1: - type: text - t2: - type: text - obj: - properties: - t1: - type: text - i_t1: - type: text - i_t3: - type: text - - - do: - indices.create: - include_type_name: true - index: test_index_2 - body: - mappings: - test_type_2: - properties: - t1: - type: text - t2: - type: text - obj: - properties: - t1: - type: text - i_t1: - type: text - i_t3: - type: text - ---- -"Get field mapping with * for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - fields: "*" - - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } - ---- -"Get field mapping with t* for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "t*" - - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - ---- 
-"Get field mapping with *t1 for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "*t1" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - length: {test_index.mappings.test_type: 3} - ---- -"Get field mapping with wildcarded relative names": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "obj.i_*" - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } - - length: {test_index.mappings.test_type: 2} - ---- -"Get field mapping should work using '_all' for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: _all - type: _all - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - ---- -"Get field mapping should work using '*' for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: '*' - type: '*' - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - ---- -"Get field mapping should work using comma_separated values for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: 
'test_index,test_index_2' - type: 'test_type,test_type_2' - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml deleted file mode 100644 index 2b6433a3e98f8..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml +++ /dev/null @@ -1,21 +0,0 @@ ---- -"GET mapping with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.get_field_mapping: - include_type_name: false - index: index - fields: foo - - - match: { index.mappings.foo.mapping.foo.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml index c3addd95469d4..e46f67326a8d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml deleted file mode 100644 index 598cc24f7806b..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,158 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_1 - body: - mappings: - doc: {} - - do: - indices.create: - include_type_name: true - index: test_2 - body: - mappings: - doc: {} ---- -"Get /{index}/_mapping with empty mappings": - - - do: - indices.create: - index: t - - - do: - indices.get_mapping: - include_type_name: true - index: t - - - match: { t.mappings: {}} - ---- -"Get /_mapping": - - - do: - indices.get_mapping: - include_type_name: true - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /{index}/_mapping": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - - - is_true: test_1.mappings.doc - - is_false: test_2 - - ---- -"Get /{index}/_mapping/_all": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: _all - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/*": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: '*' - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: doc - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/{type*}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: 'd*' - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /_all/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: 
true - index: _all - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /*/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: '*' - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index,index/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1,test_2 - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index*/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: '*2' - type: doc - - - is_true: test_2.mappings.doc - - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml deleted file mode 100644 index f17fb6a595305..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml +++ /dev/null @@ -1,106 +0,0 @@ ---- -"Non-existent type returns 404": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: not_test_type - - - match: { status: 404 } - - match: { error.reason: 'type[[not_test_type]] missing' } - ---- -"No type matching pattern returns 404": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test*,not* - - - match: { status: 404 } - - match: { error: 'type [not*] missing' } - - is_true: test_index.mappings.test_type - ---- -"Existent and non-existent type returns 404 and the existing type": - - do: - 
indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test_type,not_test_type - - - match: { status: 404 } - - match: { error: 'type [not_test_type] missing' } - - is_true: test_index.mappings.test_type - ---- -"Existent and non-existent types returns 404 and the existing type": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test_type,not_test_type,another_not_test_type - - - match: { status: 404 } - - match: { error: 'types [another_not_test_type,not_test_type] missing' } - - is_true: test_index.mappings.test_type - ---- -"Type missing when no types exist": - - do: - catch: missing - indices.get_mapping: - include_type_name: true - type: not_test_type diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml index 5a7624265ecc9..1bbfbc4f4c967 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml @@ -4,7 +4,7 @@ catch: missing indices.get_mapping: index: test_index - + --- "Index missing, no indexes": - do: @@ -14,9 +14,6 @@ --- "Index missing, ignore_unavailable=true": - - skip: - version: " - 6.99.99" - reason: ignore_unavailable was ignored in previous versions - do: indices.get_mapping: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml index 15a52b7b2db25..956b80ce16b52 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml @@ -18,7 +18,6 @@ - do: indices.get_mapping: - include_type_name: false index: test_alias - match: {test_index.mappings.properties.text.type: text} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml index d3f15b3292285..7f6f3999c868d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml @@ -102,9 +102,6 @@ setup: --- "Get test-* with wildcard_expansion=none": - - skip: - version: " - 6.99.99" - reason: allow_no_indices (defaults to true) was ignored in previous versions - do: indices.get_mapping: index: test-x* @@ -113,9 +110,6 @@ setup: - match: { '': {} } --- "Get test-* with wildcard_expansion=none allow_no_indices=false": - - skip: - version: " - 6.99.99" - reason: allow_no_indices was ignored in previous versions - do: catch: missing indices.get_mapping: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml deleted file mode 100644 index 6da7f4a2c6946..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -setup: - - - do: - indices.create: - index: test_1 - - - do: - indices.create: - index: test_2 - ---- -"Check empty mapping when getting all mappings via /_mapping": - - - do: - 
indices.get_mapping: - include_type_name: true - - - match: { test_1.mappings: {}} - - match: { test_2.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml deleted file mode 100644 index 162a8d340d48a..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -"GET mapping with typeless API on an index that has types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.get_mapping: - index: index - - - match: { index.mappings.properties.foo.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml index 2e3f4af03ebef..83e77140facbc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/30_defaults.yml @@ -10,9 +10,6 @@ setup: index: test-index --- Test retrieval of default settings: - - skip: - version: " - 6.3.99" - reason: include_defaults will not work in mixed-mode clusters containing nodes pre-6.4 - do: indices.get_settings: flat_settings: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml index c1aac94bf1d84..9becbd54a3773 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml @@ -1,7 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - do: indices.put_template: name: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml deleted file mode 100644 index 0ecf304b1ce70..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml +++ /dev/null @@ -1,48 +0,0 @@ -setup: - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - ---- -"Get template": - - - do: - indices.get_template: - include_type_name: true - name: test - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Get template with no mappings": - - - do: - indices.put_template: - name: test_no_mappings - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - - - do: - indices.get_template: - include_type_name: true - name: test_no_mappings - - - match: {test_no_mappings.index_patterns: ["test-*"]} - - match: {test_no_mappings.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test_no_mappings.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml index eb9f834ef4979..35e4c29f27d3e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml @@ -38,10 +38,6 @@ --- "Open index with wait_for_active_shards set to all": - - skip: - version: " - 6.0.99" - reason: wait_for_active_shards parameter was added in 6.1.0 - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml index ff68b04f20609..77338a6ddae0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml @@ -59,11 +59,6 @@ --- "Can set is_write_index": - - - skip: - version: " - 6.3.99" - reason: "is_write_index is only available from 6.4.0 on" - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 338eaba8881c3..36317c7ae173c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -1,8 +1,5 @@ --- "Test Create and update mapping": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_index @@ -53,10 +50,6 @@ --- "Create index with invalid mappings": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_index @@ -72,14 +65,38 @@ --- "Put mappings with explicit _doc type": - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 + version: " - 1.99.99" + reason: "deprecation message changed in 2.0" + - do: + indices.create: + index: test_index + + - do: + catch: bad_request + indices.put_mapping: + index: 
test_index + body: + _doc: + properties: + field: + type: keyword + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "Types cannot be provided in put mapping requests" } +--- +"Put mappings with explicit _doc type bwc": + - skip: + version: "2.0.0 - " + reason: "old deprecation message for pre 2.0" + features: "node_selector" - do: indices.create: index: test_index - do: + node_selector: + version: " - 1.99.99" catch: bad_request indices.put_mapping: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml deleted file mode 100644 index 5da9cd4bf707c..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -"Test Create and update mapping": - - do: - indices.create: - index: test_index - - - do: - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - text2: - type: text - analyzer: whitespace - subfield.text3: - type: text - - - do: - indices.get_mapping: - include_type_name: true - index: test_index - - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} - - match: {test_index.mappings.test_type.properties.text2.type: text} - - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} - - - do: - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - fields: - text_raw: - type: keyword - - - - do: - indices.get_mapping: - include_type_name: true - index: test_index - - - match: {test_index.mappings.test_type.properties.text1.type: text} 
- - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text} - - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} - ---- -"Create index with invalid mappings": - - do: - indices.create: - index: test_index - - do: - catch: /illegal_argument_exception/ - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - "": - type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml deleted file mode 100644 index 13cb3321841cf..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml +++ /dev/null @@ -1,83 +0,0 @@ ---- -"PUT mapping with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.put_mapping: - include_type_name: false - index: index - body: - properties: - bar: - type: "long" - - - do: - indices.get_mapping: - include_type_name: false - index: index - - - match: { index.mappings.properties.foo.type: "keyword" } - - match: { index.mappings.properties.bar.type: "long" } - - - do: - indices.put_mapping: - include_type_name: false - index: index - body: - properties: - foo: - type: "keyword" # also test no-op updates that trigger special logic wrt the mapping version - - - do: - catch: /the final mapping would have more than 1 type/ - indices.put_mapping: - include_type_name: true - index: index - type: some_other_type - body: - some_other_type: - properties: - bar: - type: "long" - - ---- -"PUT mapping with _doc on an index that has types": - - - skip: - version: " - 6.6.99" - reason: include_type_name is only 
supported as of 6.7 - - - - do: - indices.create: - include_type_name: true - index: index - body: - mappings: - my_type: - properties: - foo: - type: "keyword" - - - do: - catch: /the final mapping would have more than 1 type/ - indices.put_mapping: - include_type_name: true - index: index - type: _doc - body: - _doc: - properties: - bar: - type: "long" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml index 182ec017e0d30..c1daa76fe3d6e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml @@ -1,7 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_index1 @@ -162,4 +159,4 @@ setup: indices.get_mapping: {} - match: {test_index1.mappings.properties.text.type: text} - + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml deleted file mode 100644 index 6f9b6f7d9ceef..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml +++ /dev/null @@ -1,227 +0,0 @@ -setup: - - do: - indices.create: - index: test_index1 - - do: - indices.create: - index: test_index2 - - do: - indices.create: - index: foo - - ---- -"put one mapping per index": - - do: - indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - do: - indices.put_mapping: - include_type_name: true - index: test_index2 - type: test_type - body: - test_type: - properties: - text: - type: text - 
analyzer: whitespace - - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: { foo.mappings: {} } - ---- -"put mapping in _all index": - - - do: - indices.put_mapping: - include_type_name: true - index: _all - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping in * index": - - do: - indices.put_mapping: - include_type_name: true - index: "*" - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping in prefix* index": - - do: - indices.put_mapping: - include_type_name: true - index: "test_index*" - type: test_type - body: - 
test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: { foo.mappings: {} } - ---- -"put mapping in list of indices": - - do: - indices.put_mapping: - include_type_name: true - index: [test_index1, test_index2] - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: { foo.mappings: {} } - ---- -"put mapping with blank index": - - do: - indices.put_mapping: - include_type_name: true - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping with missing type": - - - - do: - catch: param - indices.put_mapping: - include_type_name: true - ---- -"post a mapping with default analyzer twice": - - - do: - 
indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text - - - do: - indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml index f7a83442ca2e2..5b40ad0771c70 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml @@ -1,9 +1,5 @@ --- "Put template": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - - do: indices.put_template: name: test @@ -28,10 +24,6 @@ --- "Put multiple template": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - - do: indices.put_template: name: test @@ -56,10 +48,6 @@ --- "Put template with empty mappings": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - - do: indices.put_template: name: test @@ -238,24 +226,3 @@ indices.put_template: name: test body: {} - ---- -"Put template with explicit _doc type": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - - - do: - catch: bad_request - indices.put_template: - name: test - body: - index_patterns: test-* - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set 
to true." } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml deleted file mode 100644 index fde28db3c691d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -"Put template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Put multiple template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: [test-*, test2-*] - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: text - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*", "test2-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: text}}}}} - ---- -"Put template with empty mappings": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: {} - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml index c99f736a24979..95c0ff509f304 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/30_max_size_condition.yml @@ -1,10 +1,5 @@ --- "Rollover with max_size condition": - - - skip: - version: " - 6.0.99" - reason: max_size condition is introduced in 6.1.0 - # create index with alias and replica - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml index 47b004326a457..b669c2ab75176 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml @@ -1,9 +1,5 @@ --- "Typeless mapping": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0.0 - - do: indices.create: index: logs-1 @@ -41,32 +37,3 @@ - match: { conditions: { "[max_docs: 2]": true } } - match: { rolled_over: true } - ---- -"Mappings with explicit _doc type": - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - - - do: - indices.create: - index: logs-1 - body: - aliases: - logs_search: {} - - - do: - catch: bad_request - indices.rollover: - alias: "logs_search" - body: - conditions: - max_docs: 2 - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.caused_by.type: "illegal_argument_exception" } - - match: { error.caused_by.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml index 41c851b71cc6c..a5b1cb8607b3a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml @@ -1,10 +1,5 @@ --- "Shrink index via API": - - skip: - version: " - 6.9.99" - reason: expects warnings that pre-7.0.0 will not send - features: [warnings, arbitrary_key] - # creates an index with one document solely allocated on a particular data node # and shrinks it into a new index with a single shard # we don't do the relocation to a single node after the index is created @@ -40,7 +35,6 @@ id: "1" - match: { _index: source } - - match: { _type: _doc } - match: { _id: "1" } - match: { _source: { foo: "hello world" } } @@ -78,6 +72,5 @@ id: "1" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "1" } - match: { _source: { foo: "hello world" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml index 8fe8643d049ea..a744895c4ce38 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml @@ -1,10 +1,7 @@ --- "Copy settings during shrink index": - skip: - version: " - 6.9.99" - reason: expects warnings that pre-7.0.0 will not send - features: [allowed_warnings, arbitrary_key] - + features: allowed_warnings - do: nodes.info: node_id: data:true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 2baa82ea78842..4ae1d0002a237 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -29,11 +29,6 @@ setup: --- "Split index via API": - - skip: - version: " - 6.9.99" - reason: pre-7.0.0 will send warnings - features: "warnings" - # make it read-only - do: indices.put_settings: @@ -69,7 +64,6 @@ setup: id: "1" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "1" } - match: { _source: { foo: "hello world" } } @@ -80,7 +74,6 @@ setup: id: "2" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "2" } - match: { _source: { foo: "hello world 2" } } @@ -91,16 +84,12 @@ setup: id: "3" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "3" } - match: { _source: { foo: "hello world 3" } } --- "Split from 1 to N": - - skip: - version: " - 6.99.99" - reason: automatic preparation for splitting was added in 7.0.0 - do: indices.create: index: source_one_shard @@ -162,7 +151,6 @@ setup: id: "1" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "1" } - match: { _source: { foo: "hello world" } } @@ -173,7 +161,6 @@ setup: id: "2" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "2" } - match: { _source: { foo: "hello world 2" } } @@ -184,17 +171,11 @@ setup: id: "3" - match: { _index: target } - - match: { _type: _doc } - match: { _id: "3" } - match: { _source: { foo: "hello world 3" } } --- "Create illegal split indices": - - skip: - version: " - 6.9.99" - reason: pre-7.0.0 will send warnings - features: "warnings" - # try to do an illegal split with number_of_routing_shards set - do: catch: /illegal_argument_exception/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 3740167a0253a..c86e49aac0561 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ -1,10 +1,5 @@ --- "Split index ignores target template mapping": - - skip: - version: " - 6.9.99" - reason: pre-7.0.0 will send warnings - features: "warnings" - # create index - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml index 46517f6055f32..0ceacf1f064ca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml @@ -1,10 +1,7 @@ --- "Copy settings during split index": - skip: - version: " - 6.9.99" - reason: expects warnings that pre-7.0.0 will not send - features: [arbitrary_key, allowed_warnings] - + features: allowed_warnings - do: nodes.info: node_id: data:true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index 1a650ee88eae6..54b4163ee6502 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -39,10 +39,6 @@ setup: --- "Index - all": - - skip: - version: " - 6.3.99" - reason: "uuid is only available from 6.4.0 on" - - do: indices.stats: { index: _all } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml index 711bdc8d4ca3d..e817bc0d27337 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml @@ -86,9 +86,6 @@ --- "Translog last modified age stats": - - skip: - version: " - 6.2.99" - reason: translog last modified age stats was added in 6.3.0 - do: index: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml index 798d699ae80a0..854c460c535c0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml @@ -1,9 +1,5 @@ --- "Basic multi-get": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: test_2 @@ -26,17 +22,14 @@ - is_false: docs.0.found - match: { docs.0._index: test_2 } - - match: { docs.0._type: null } - match: { docs.0._id: "1" } - is_false: docs.1.found - match: { docs.1._index: test_1 } - - match: { docs.1._type: _doc } - match: { docs.1._id: "2" } - is_true: docs.2.found - match: { docs.2._index: test_1 } - - match: { docs.2._type: _doc } - match: { docs.2._id: "1" } - match: { docs.2._version: 1 } - match: { docs.2._source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml index a1101a903f896..38ca8da20dd5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml @@ -1,9 +1,5 @@ --- "Non-existent index": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 @@ -18,7 +14,6 @@ - is_false: docs.0.found - match: { docs.0._index: test_2 } - - match: { docs.0._type: null } - match: { docs.0._id: "1" } - do: @@ -29,5 +24,4 @@ - is_true: docs.0.found - match: { 
docs.0._index: test_1 } - - match: { docs.0._type: _doc } - match: { docs.0._id: "1" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml index 2711bed58dbb1..eb46d45f027d3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml @@ -1,9 +1,5 @@ --- "Missing metadata": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 @@ -43,7 +39,6 @@ - is_true: docs.0.found - match: { docs.0._index: test_1 } - - match: { docs.0._type: _doc } - match: { docs.0._id: "1" } - match: { docs.0._version: 1 } - match: { docs.0._source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml index 9c1d0242b05c9..825dc256d786a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml @@ -8,11 +8,11 @@ bulk: refresh: true body: | - {"index": {"_index": "test_1", "_type": "_doc", "_id": 1}} + {"index": {"_index": "test_1", "_id": 1}} { "foo": "bar" } - {"index": {"_index": "test_2", "_type": "_doc", "_id": 2}} + {"index": {"_index": "test_2", "_id": 2}} { "foo": "bar" } - {"index": {"_index": "test_3", "_type": "_doc", "_id": 3}} + {"index": {"_index": "test_3", "_id": 3}} { "foo": "bar" } - do: @@ -34,12 +34,10 @@ - is_true: docs.0.found - match: { docs.0._index: test_1 } - - match: { docs.0._type: _doc } - match: { docs.0._id: "1" } - is_false: docs.1.found - match: { docs.1._index: test_two_and_three } - - match: { docs.1._type: null } - match: { docs.1._id: "2" } - match: { 
docs.1.error.root_cause.0.type: "illegal_argument_exception" } - match: { docs.1.error.root_cause.0.reason: "/[aA]lias.\\[test_two_and_three\\].has.more.than.one.index.associated.with.it.\\[test_[23]{1},.test_[23]{1}\\],.can't.execute.a.single.index.op/" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml index fbdc9b265a95a..f71b5e86dab56 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml @@ -1,9 +1,5 @@ --- "IDs": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: test_1 @@ -28,14 +24,12 @@ - is_true: docs.0.found - match: { docs.0._index: test_1 } - - match: { docs.0._type: _doc } - match: { docs.0._id: "1" } - match: { docs.0._version: 1 } - match: { docs.0._source: { foo: bar }} - is_false: docs.1.found - match: { docs.1._index: test_1 } - - match: { docs.1._type: _doc } - match: { docs.1._id: "3" } - do: @@ -46,14 +40,12 @@ - is_true: docs.0.found - match: { docs.0._index: test_1 } - - match: { docs.0._type: _doc } - match: { docs.0._id: "1" } - match: { docs.0._version: 1 } - match: { docs.0._source: { foo: bar }} - is_true: docs.1.found - match: { docs.1._index: test_1 } - - match: { docs.1._type: _doc } - match: { docs.1._id: "2" } - match: { docs.1._version: 1 } - match: { docs.1._source: { foo: baz }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml index d03f99be39517..15fd4320851e6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml @@ -1,9 +1,5 @@ --- "Default index/type": - - skip: - version: " - 6.99.99" - reason: types are required in 
requests before 7.0.0 - - do: indices.create: index: test_2 @@ -24,17 +20,14 @@ - is_false: docs.0.found - match: { docs.0._index: test_2 } - - match: { docs.0._type: null } - match: { docs.0._id: "1" } - is_false: docs.1.found - match: { docs.1._index: test_1 } - - match: { docs.1._type: _doc } - match: { docs.1._id: "2" } - is_true: docs.2.found - match: { docs.2._index: test_1 } - - match: { docs.2._type: _doc } - match: { docs.2._id: "1" } - match: { docs.2._version: 1 } - match: { docs.2._source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml index 45460deb04e0b..1c965a9573ae0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml @@ -1,9 +1,5 @@ --- "Stored fields": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: indices.create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml index df2924f274bdf..50bf9a158852b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml @@ -40,6 +40,5 @@ - is_true: docs.2.found - match: { docs.2._index: test_1 } - - match: { docs.2._type: _doc } - match: { docs.2._id: "1" } - match: { docs.2._routing: "5" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml index 3a3086cf3616d..35a85cf9c0116 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml @@ -1,8 
+1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml index 0283455350a80..2a1b4501eae17 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml @@ -1,11 +1,6 @@ --- "Deprecated parameters should fail in Multi Get query": - - skip: - version: " - 6.99.99" - reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead - features: "warnings" - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml index bb1b25a0dcb40..50eb344d99048 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/20_docs.yml @@ -41,12 +41,10 @@ like: - _index: test_1 - _type: _doc doc: foo: bar - _index: test_1 - _type: _doc _id: 2 - _id: 3 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml index abea4c8fbe57a..a0f96eb6b2d1f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mlt/30_unlike.yml @@ -40,11 +40,9 @@ more_like_this: like: _index: test_1 - _type: _doc _id: 1 unlike: _index: test_1 - _type: _doc _id: 3 include: true min_doc_freq: 0 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml index 5b092c9d15e44..3a06946bffd70 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml @@ -94,10 +94,6 @@ setup: --- "Search with new response format": - - skip: - version: " - 6.99.99" - reason: hits.total is returned as an object in 7.0.0 - - do: msearch: body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml index 87c3e6065bba4..56e5b2eb88e0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml @@ -1,7 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: indices.create: index: testidx diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml index 376192680c99b..215c62c2a40ed 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml @@ -1,16 +1,5 @@ -setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - --- "Deprecated camel case and _ parameters should fail in Term Vectors query": - - - skip: - version: " - 6.99.99" - reason: camel case and _ parameters (e.g. 
versionType, _version_type) should fail from 7.0 - features: "warnings" - - do: indices.create: index: testidx @@ -47,7 +36,6 @@ setup: "docs": - "_index" : "testidx" - "_type" : "_doc" "_id" : "testing_document" "version" : 1 "_version_type" : "external" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml deleted file mode 100644 index b14b5f94ebbc2..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -"mtermvectors without types on an index that has types": - - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type : "text" - term_vector : "with_positions_offsets" - - - do: - index: - index: index - id: 1 - body: { foo: bar } - - - do: - mtermvectors: - body: - docs: - - _index: index - _id: 1 - - - match: {docs.0.term_vectors.foo.terms.bar.term_freq: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml index aa6d1e9841dd7..9294c696d91e6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml @@ -198,9 +198,6 @@ --- "Scroll cannot used the request cache": - - skip: - version: " - 6.99.99" - reason: the error message has been added in v7.0.0 - do: indices.create: index: test_scroll @@ -217,9 +214,6 @@ --- "Scroll with size 0": - - skip: - version: " - 6.1.99" - reason: the error message has been added in v6.2.0 - do: indices.create: index: test_scroll @@ -237,10 +231,6 @@ --- "Scroll max_score is null": - - skip: 
- version: " - 6.99.99" - reason: max_score was set to 0 rather than null before 7.0 - - do: indices.create: index: test_scroll @@ -285,9 +275,6 @@ --- "Scroll with new response format": - - skip: - version: " - 6.9.99" - reason: hits.total is returned as an object in 7.0.0 - do: indices.create: index: test_scroll diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml index f655b43b98949..228b1a73ecd2b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml @@ -103,10 +103,6 @@ setup: --- "Sliced scroll with invalid arguments": - - skip: - version: " - 6.99.99" - reason: Prior versions return 500 rather than 404 - - do: catch: bad_request search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml index a7e75f80e3f6e..0f35f95be0bf6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml @@ -10,10 +10,6 @@ --- "Max keep alive": - - skip: - version: " - 6.99.99" - reason: search.max_keep_alive was added in 7.0.0 - - do: index: index: test_scroll diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml index ddb555b8cd5a5..d13636360d2f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml @@ -49,10 +49,6 @@ setup: --- "top_hits aggregation with nested documents": - - skip: - version: " - 6.1.99" - reason: "<= 6.1 nodes don't 
always include index or id in nested top hits" - - do: search: rest_total_hits_as_int: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml index 3683ad108e8c2..7c7a223044725 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml @@ -702,11 +702,6 @@ setup: --- "Global ordinals are not loaded with the map execution hint": - - - skip: - version: " - 6.99.99" - reason: bug fixed in 7.0 - - do: index: refresh: true @@ -752,11 +747,6 @@ setup: --- "Global ordinals are loaded with the global_ordinals execution hint": - - - skip: - version: " - 6.99.99" - reason: bug fixed in 7.0 - - do: index: refresh: true @@ -830,6 +820,10 @@ setup: { "str": "cow", "number": 1 } { "index": {} } { "str": "pig", "number": 1 } + - do: + indices.forcemerge: + index: test_1 + max_num_segments: 1 - do: search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 1ec3a302d6884..2e298441918bc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -84,10 +84,6 @@ setup: --- "Simple Composite aggregation": - - skip: - version: " - 6.0.99" - reason: this uses a new API that has been added in 6.1 - - do: search: rest_total_hits_as_int: true @@ -113,11 +109,6 @@ setup: --- "Nested Composite aggregation": - - skip: - version: " - 6.0.99" - reason: this uses a new API that has been added in 6.1 - - - do: search: rest_total_hits_as_int: true @@ -163,11 +154,6 @@ setup: --- "Aggregate After": - - skip: - version: " - 6.0.99" - reason: this uses a new API 
that has been added in 6.1 - - - do: search: rest_total_hits_as_int: true @@ -205,11 +191,6 @@ setup: --- "Aggregate After Missing": - - skip: - version: " - 6.1.99" - reason: bug fixed in 6.2.0 - - - do: search: rest_total_hits_as_int: true @@ -236,10 +217,6 @@ setup: --- "Invalid Composite aggregation": - - skip: - version: " - 6.0.99" - reason: this uses a new API that has been added in 6.1 - - do: catch: /\[composite\] aggregation cannot be used with a parent aggregation/ search: @@ -426,10 +403,6 @@ setup: --- "Composite aggregation with after_key in the response": - - skip: - version: " - 6.2.99" - reason: starting in 6.3.0 after_key is returned in the response - - do: search: rest_total_hits_as_int: true @@ -455,10 +428,6 @@ setup: --- "Composite aggregation and array size": - - skip: - version: " - 6.99.99" - reason: starting in 7.0 the composite aggregation throws an execption if the provided size is greater than search.max_buckets. - - do: catch: /.*Trying to create too many buckets.*/ search: @@ -481,10 +450,6 @@ setup: --- "Composite aggregation with nested parent": - - skip: - version: " - 6.99.99" - reason: the ability to set a nested parent aggregation was added in 7.0. - - do: search: rest_total_hits_as_int: true @@ -1030,3 +995,35 @@ setup: - length: { aggregations.test.buckets: 1 } - match: { aggregations.test.buckets.0.key.keyword: "foo" } - match: { aggregations.test.buckets.0.doc_count: 1 } +--- +"Simple Composite aggregation with missing order": + - skip: + version: " - 1.2.99" + reason: missing_order is supported in 1.3.0. 
+ + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + test: + composite: + sources: [ + "kw": { + "terms": { + "field": "keyword", + "missing_bucket": true, + "missing_order": "last" + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 3 } + - match: { aggregations.test.buckets.0.key.kw: "bar" } + - match: { aggregations.test.buckets.0.doc_count: 3 } + - match: { aggregations.test.buckets.1.key.kw: "foo" } + - match: { aggregations.test.buckets.1.doc_count: 2 } + - match: { aggregations.test.buckets.2.key.kw: null } + - match: { aggregations.test.buckets.2.doc_count: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml index 75349e9839746..339fe72b77730 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml @@ -1,8 +1,3 @@ -setup: - - skip: - version: " - 6.3.99" - reason: "moving_fn added in 6.4.0" - --- "Bad window": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml index c5988fc9e5dc4..6b17132c751de 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml @@ -1,7 +1,4 @@ setup: - - skip: - version: " - 6.3.99" - reason: weighted_avg is only available as of 6.4.0 - do: indices.create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml index 0cba08fccae9b..03797503436fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml @@ -1,7 +1,4 @@ setup: - - skip: - version: " - 6.5.99" - reason: "added in 6.6.0" - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml index 534e552fc0ea2..1368c87a77d7e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml @@ -1,7 +1,6 @@ setup: - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml index 2db498a0cacf0..dfd5b6c5f2583 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml @@ -4,7 +4,6 @@ setup: reason: "added in 7.0.0" - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml index bc4105af85e65..2b02c0447e6c8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml @@ -31,10 +31,6 @@ setup: --- "Filter aggs with terms lookup and ensure it's cached": # Because the filter agg rewrites the terms lookup in the rewrite phase the request can be cached - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: search: rest_total_hits_as_int: true @@ -78,10 +74,6 @@ setup: --- "As a child of terms": - - skip: - version: " - 6.99.99" - reason: the test is written for hits.total.value - - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml index 334708b54b066..462f4f5d25e0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml @@ -28,9 +28,6 @@ setup: --- "Unified highlighter on a field WITHOUT OFFSETS exceeding index.highlight.max_analyzed_offset should FAIL": - - skip: - version: " - 6.99.99" - reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0 - do: catch: bad_request search: @@ -42,9 +39,6 @@ setup: --- "Plain highlighter on a field WITHOUT OFFSETS exceeding index.highlight.max_analyzed_offset should FAIL": - - skip: - version: " - 6.99.99" - reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0 - do: catch: bad_request search: @@ -56,9 +50,6 @@ setup: --- "Unified highlighter on a field WITH OFFSETS exceeding index.highlight.max_analyzed_offset should SUCCEED": - - skip: - version: " - 6.99.99" - reason: index.highligt.max_analyzed_offset setting has been added in 7.0.0 - do: search: rest_total_hits_as_int: true @@ -69,9 +60,6 @@ setup: --- "Plain highlighter on a field WITH OFFSETS exceeding index.highlight.max_analyzed_offset 
should FAIL": - - skip: - version: " - 6.99.99" - reason: index.highlight.max_analyzed_offset setting has been added in 7.0.0 - do: catch: bad_request search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml index c3ffd930e9e1d..7b3b4e8233d0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml @@ -30,10 +30,8 @@ setup: body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : {} } } } - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._index: "test" } - - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "_doc" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._id: "1" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.field: "nested_field" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 } @@ -63,7 +61,6 @@ setup: - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0._version: 1 } - match: { hits.hits.0.fields._seq_no: [0] } @@ -86,7 +83,6 @@ setup: - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0._version: 2 } - match: { hits.hits.0.fields._seq_no: [1] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml index a82d7fff480eb..d2933a44e586d 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml @@ -19,7 +19,6 @@ setup: index: test - is_true: hits.hits.0._id - - is_true: hits.hits.0._type - is_true: hits.hits.0._source - do: @@ -30,7 +29,6 @@ setup: stored_fields: [] - is_true: hits.hits.0._id - - is_true: hits.hits.0._type - is_false: hits.hits.0._source - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml index e89d340347a12..091638d6a07fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -141,9 +141,6 @@ setup: --- "docvalue_fields": - - skip: - version: " - 6.9.99" - reason: Triggers a deprecation warning before 7.0 - do: search: body: @@ -152,9 +149,6 @@ setup: --- "multiple docvalue_fields": - - skip: - version: " - 6.9.99" - reason: Triggered a deprecation warning before 7.0 - do: search: body: @@ -163,9 +157,6 @@ setup: --- "docvalue_fields as url param": - - skip: - version: " - 6.99.99" - reason: Triggered a deprecation warning before 7.0 - do: search: docvalue_fields: [ "count" ] @@ -174,8 +165,6 @@ setup: --- "docvalue_fields with default format": - skip: - version: " - 6.99.99" - reason: Only triggers warnings on 7.0+ features: allowed_warnings - do: allowed_warnings: @@ -189,9 +178,6 @@ setup: --- "docvalue_fields with explicit format": - - skip: - version: " - 6.3.99" - reason: format option was added in 6.4 - do: search: body: @@ -202,9 +188,6 @@ setup: --- "docvalue_fields - double": - - skip: - version: " - 6.99.99" - reason: Triggered a deprecation warning before 7.0 - do: search: body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml index ec279b8d0d5ec..149bc90f31ea0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -73,19 +73,16 @@ setup: - match: {hits.total: 6 } - length: {hits.hits: 3 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0.fields.numeric_group: [3] } - match: {hits.hits.0.sort: [36] } - match: {hits.hits.0._id: "6" } - is_false: hits.hits.0.inner_hits - match: {hits.hits.1._index: test } - - match: {hits.hits.1._type: _doc } - match: {hits.hits.1.fields.numeric_group: [1] } - match: {hits.hits.1.sort: [24] } - match: {hits.hits.1._id: "3" } - is_false: hits.hits.1.inner_hits - match: {hits.hits.2._index: test } - - match: {hits.hits.2._type: _doc } - match: {hits.hits.2.fields.numeric_group: [25] } - match: {hits.hits.2.sort: [10] } - match: {hits.hits.2._id: "4" } @@ -111,7 +108,6 @@ setup: - match: {hits.total: 6 } - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0.fields.numeric_group: [25]} - match: {hits.hits.0.sort: [10] } - match: {hits.hits.0._id: "4" } @@ -140,7 +136,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -148,7 +143,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -157,7 +151,6 @@ setup: - match: { 
hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -219,7 +212,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -227,7 +219,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -236,7 +227,6 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -315,11 +305,6 @@ setup: --- "no hits and inner_hits max_score null": - - - skip: - version: " - 6.99.99" - reason: max_score was set to 0 rather than null before 7.0 - - do: search: rest_total_hits_as_int: true @@ -353,7 +338,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -364,7 +348,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test 
} - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -376,7 +359,6 @@ setup: - length: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits: 1 } - match: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits.0._id: "3" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -390,11 +372,6 @@ setup: --- "field collapsing, inner_hits and version": - - - skip: - version: " - 6.1.0" - reason: "bug fixed in 6.1.1" - - do: count: index: test @@ -412,7 +389,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -422,7 +398,6 @@ setup: - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._version: 66 } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -434,7 +409,6 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._version: 11 } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -493,11 +467,6 @@ setup: --- "field collapsing, inner_hits and seq_no": - - - skip: - version: " - 6.99.0" - reason: "sequence numbers introduced in 7.0.0" - - do: search: rest_total_hits_as_int: true @@ -532,7 +501,6 @@ setup: - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._seq_no: 0 } - gte: { 
hits.hits.1.inner_hits.sub_hits.hits.hits.1._primary_term: 1 } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index be34e10ddcd74..77298cb4f61c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -39,7 +39,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - do: indices.create: index: lookup_index @@ -64,7 +64,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - match: { _shards.total: 5 } - match: { _shards.successful: 5 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml index 33b149b00a4fb..201e456be2cdd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml @@ -550,9 +550,6 @@ setup: --- "Test exists query on _index field": - - skip: - version: " - 6.0.99" - reason: exists on _index not supported 
prior to 6.1.0 - do: search: rest_total_hits_as_int: true @@ -566,9 +563,6 @@ setup: --- "Test exists query on _type field": - - skip: - version: " - 6.0.99" - reason: exists on _type not supported prior to 6.1.0 - do: search: rest_total_hits_as_int: true @@ -608,9 +602,6 @@ setup: --- "Test exists query on _source field": - - skip: - version: " - 6.0.99" - reason: exists on _source not supported prior to 6.1.0 - do: catch: /query_shard_exception/ search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml index 89ea24618c68f..82ccb816f2314 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml @@ -1,8 +1,5 @@ --- "Terms Query with No.of terms exceeding index.max_terms_count should FAIL": - - skip: - version: " - 6.99.99" - reason: index.max_terms_count setting has been added in 7.0.0 - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml deleted file mode 100644 index d3d48eae4082d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml +++ /dev/null @@ -1,63 +0,0 @@ ---- -"Terms Query with No.of terms exceeding index.max_terms_count should FAIL": - - skip: - version: " - 6.99.99" - reason: index.max_terms_count setting has been added in 7.0.0 - features: allowed_warnings - - do: - indices.create: - include_type_name: true - index: test_index - body: - settings: - number_of_shards: 1 - index.max_terms_count: 2 - mappings: - test_type: - properties: - user: - type: keyword - followers: - type: keyword - - do: - bulk: - refresh: true - body: - - '{"index": {"_index": "test_index", "_type": 
"test_type", "_id": "u1"}}' - - '{"user": "u1", "followers": ["u2", "u3"]}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u2"}}' - - '{"user": "u2", "followers": ["u1", "u3", "u4"]}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u3"}}' - - '{"user": "u3", "followers": ["u1"]}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u4"}}' - - '{"user": "u4", "followers": ["u3"]}' - - - do: - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : ["u1", "u2"]}}} - - match: { hits.total: 2 } - - - do: - catch: bad_request - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : ["u1", "u2", "u3"]}}} - - - do: - allowed_warnings: - - "Deprecated field [type] used, this field is unused and will be removed entirely" - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}} - - match: { hits.total: 2 } - - - do: - catch: bad_request - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml index 40c80b88cfb1b..6f276f669f815 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.2.99" - reason: index_prefixes is only available as of 6.3.0 - - do: indices.create: index: test @@ -27,9 +23,6 @@ setup: --- "search with index prefixes": - - skip: - version: " - 6.2.99" - reason: index_prefixes is only 
available as of 6.3.0 - do: search: rest_total_hits_as_int: true @@ -85,10 +78,6 @@ setup: --- "search index prefixes with span_multi": - - skip: - version: " - 6.99.99" - reason: span_multi throws an exception with prefix fields on < versions - - do: search: rest_total_hits_as_int: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml index 71ddb32302396..8596821a76c41 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_ignore_malformed.yml @@ -1,9 +1,5 @@ --- setup: - - skip: - version: " - 6.3.99" - reason: _ignored was added in 6.4.0 - - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml index b48857be4e7a1..6340b20a4765f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_index_phrase_search.yml @@ -1,8 +1,5 @@ --- "search with indexed phrases": - - skip: - version: " - 6.99.99" - reason: index_phrase is only available as of 7.0.0 - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml index fd4621e48cad3..14d68fa3c429a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml @@ -46,7 +46,6 @@ setup: - match: {hits.total: 1} - match: {hits.hits.0._index: test_1 } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "1" } - do: @@ -60,7 +59,6 @@ 
setup: - match: {hits.total: 1} - match: {hits.hits.0._index: test_2 } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "42" } --- @@ -76,9 +74,6 @@ setup: --- "Search with new response format": - - skip: - version: " - 6.99.99" - reason: hits.total is returned as an object in 7.0.0 - do: search: body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml index 92bb049980dff..4d63a81a99595 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml @@ -1,8 +1,5 @@ --- "Score should match explanation in rescore": - - skip: - version: " - 6.99.99" - reason: Explanation for rescoring was corrected after these versions - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml index 78380d0da6a71..0286d3caf66b8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: "Implemented in 7.0" - - do: indices.create: index: test @@ -17,16 +13,73 @@ setup: refresh: true body: - '{"index": {"_index": "test", "_id": "1"}}' - - '{"text" : "Some like it hot, some like it cold"}' + - '{"text" : "Some like hot and dry, some like it cold and wet"}' - '{"index": {"_index": "test", "_id": "2"}}' - '{"text" : "Its cold outside, theres no kind of atmosphere"}' - '{"index": {"_index": "test", "_id": "3"}}' - '{"text" : "Baby its cold there outside"}' - '{"index": {"_index": "test", "_id": "4"}}' - '{"text" : "Outside it is cold and wet"}' + - '{"index": {"_index": "test", "_id": "5"}}' + - 
'{"text" : "cold rain makes it wet"}' + - '{"index": {"_index": "test", "_id": "6"}}' + - '{"text" : "that is some cold cold rain"}' --- -"Test ordered matching": +"Test regexp": + - skip: + version: " - 1.2.99" + reason: "regexp introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + regexp: + pattern: "at[a-z]{2,}here" + - match: { hits.total.value: 1 } + +--- +"Test regexp, explicit case sensitive": + - skip: + version: " - 1.2.99" + reason: "case_insensitive introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + regexp: + pattern: "AT[a-z]{2,}HERE" + case_insensitive: false + - match: { hits.total.value: 0 } + +--- +"Test regexp, explicit case insensitive": + - skip: + version: " - 1.2.99" + reason: "case_insensitive introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + regexp: + pattern: "AT[a-z]{2,}HERE" + case_insensitive: true + - match: { hits.total.value: 1 } + +--- +"Test ordered matching with via mode": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -36,7 +89,25 @@ setup: text: match: query: "cold outside" - ordered: true + mode: "ordered" + - match: { hits.total.value: 2 } + +--- +"Test ordered matching": + - skip: + features: allowed_warnings + - do: + allowed_warnings: + - "Deprecated field [ordered] used, this field is unused and will be removed entirely" + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + ordered: true - match: { hits.total.value: 2 } --- @@ -52,9 +123,30 @@ setup: query: "cold outside" - match: { hits.total.value: 3 } +--- +"Test explicit unordered matching via mode": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold outside" + mode: "unordered" + - match: { hits.total.value: 3 } + --- "Test explicit unordered matching": 
+ - skip: + features: allowed_warnings - do: + allowed_warnings: + - "Deprecated field [ordered] used, this field is unused and will be removed entirely" search: index: test body: @@ -66,8 +158,45 @@ setup: ordered: false - match: { hits.total.value: 3 } +--- +"Test unordered with overlap in match": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold wet it" + mode: "unordered" + - match: { hits.total.value: 3 } + +--- +"Test unordered with no overlap in match": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + search: + index: test + body: + query: + intervals: + text: + match: + query: "cold wet it" + mode: "unordered_no_overlap" + - match: { hits.total.value: 2 } + --- "Test phrase matching": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -77,7 +206,7 @@ setup: text: match: query: "cold outside" - ordered: true + mode: "ordered" max_gaps: 0 - match: { hits.total.value: 1 } @@ -97,6 +226,9 @@ setup: --- "Test ordered max_gaps matching": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -107,12 +239,41 @@ setup: match: query: "cold outside" max_gaps: 0 - ordered: true + mode: "ordered" + - match: { hits.total.value: 1 } + +--- +"Test ordered combination with disjunction via mode": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - any_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + - match: + query: "atmosphere" + mode: "ordered" - match: { hits.total.value: 1 } --- "Test ordered combination with disjunction": + - skip: + features: allowed_warnings - do: + allowed_warnings: + - "Deprecated field [ordered] used, this field is unused and will be removed entirely" search: index: test body: @@ -134,6 
+295,9 @@ setup: --- "Test ordered combination with max_gaps": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -148,11 +312,14 @@ setup: - match: query: "outside" max_gaps: 0 - ordered: true + mode: "ordered" - match: { hits.total.value: 1 } --- "Test ordered combination": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -166,12 +333,38 @@ setup: query: "cold" - match: query: "outside" - ordered: true + mode: "ordered" + - match: { hits.total.value: 2 } + +--- +"Test unordered combination via mode": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "outside" + max_gaps: 1 + mode: "unordered" - match: { hits.total.value: 2 } --- "Test unordered combination": + - skip: + features: allowed_warnings - do: + allowed_warnings: + - "Deprecated field [ordered] used, this field is unused and will be removed entirely" search: index: test body: @@ -188,8 +381,107 @@ setup: ordered: false - match: { hits.total.value: 2 } +--- +"Test unordered combination with overlap": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "wet" + - match: + query: "it" + mode: "unordered" + - match: { hits.total.value: 3 } + +--- +"Test unordered combination no overlap": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: "cold" + - match: + query: "wet" + - match: + query: "it" + mode: "unordered_no_overlap" + - match: { hits.total.value: 2 } + +--- +"Test nested unordered combination with overlap": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + 
search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - any_of: + intervals: + - match: + query: "cold" + - match: + query: "hot" + - match: + query: "cold" + mode: "unordered" + - match: { hits.total.value: 6 } + +--- +"Test nested unordered combination no overlap": + - skip: + version: " - 1.2.99" + reason: "Implemented in 2.0" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - any_of: + intervals: + - match: + query: "cold" + - match: + query: "hot" + - match: + query: "cold" + mode: "unordered_no_overlap" + - match: { hits.total.value: 2 } + --- "Test block combination": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -203,13 +495,16 @@ setup: query: "cold" - match: query: "outside" - ordered: true + mode: "ordered" max_gaps: 0 - match: { hits.total.value: 1 } --- "Test containing": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -223,7 +518,7 @@ setup: query: "cold" - match: query: "outside" - ordered: false + mode: "unordered" filter: containing: match: @@ -233,6 +528,9 @@ setup: --- "Test not containing": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -246,7 +544,7 @@ setup: query: "cold" - match: query: "outside" - ordered: false + mode: "unordered" filter: not_containing: match: @@ -255,6 +553,9 @@ setup: --- "Test contained_by": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -272,7 +573,7 @@ setup: query: "cold" - match: query: "outside" - ordered: false + mode: "unordered" - match: { hits.total.value: 1 } --- @@ -294,10 +595,13 @@ setup: query: "cold" - match: query: "outside" - - match: { hits.total.value: 1 } + - match: { hits.total.value: 2 } --- "Test not_overlapping": + - skip: + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -311,7 +615,7 @@ 
setup: query: "cold" - match: query: "outside" - ordered: true + mode: "ordered" filter: not_overlapping: all_of: @@ -320,14 +624,14 @@ setup: query: "baby" - match: query: "there" - ordered: false + mode: "unordered" - match: { hits.total.value: 1 } --- "Test overlapping": - skip: - version: " - 7.1.99" - reason: "Implemented in 7.2" + version: " - 1.2.99" + reason: "mode introduced in 1.3" - do: search: index: test @@ -337,12 +641,12 @@ setup: text: match: query: "cold outside" - ordered: true + mode: "ordered" filter: overlapping: match: query: "baby there" - ordered: false + mode: "unordered" - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "3" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml index 7657dc2bebb36..feb875e81a785 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml @@ -1,8 +1,4 @@ setup: - - skip: - version: " - 6.99.99" - reason: "Implemented in 7.0" - - do: indices.create: index: date_ns diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml index 17735c7fd451a..1f550d114cf29 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml @@ -64,10 +64,6 @@ setup: --- "Docvalues_fields size limit": - - - skip: - version: " - 6.99.99" - reason: "Triggers warnings before 7.0" - do: catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. 
This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./ search: @@ -99,10 +95,6 @@ setup: --- "Regexp length limit": - - skip: - version: " - 6.99.99" - reason: "The regex length limit was introduced in 7.0.0" - - do: catch: /The length of regex \[1110\] used in the Regexp Query request has exceeded the allowed maximum of \[1000\]\. This maximum can be set by changing the \[index.max_regex_length\] index level setting\./ search: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml index d306cb7b1ad50..e38f5f862a273 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml @@ -31,10 +31,8 @@ - is_true: _shards.total - is_true: hits.total - is_true: hits.hits.0._index - - is_true: hits.hits.0._type - is_true: hits.hits.0._id - is_true: hits.hits.1._index - - is_true: hits.hits.1._type - is_true: hits.hits.1._id - do: @@ -48,10 +46,8 @@ - is_false: _shards.total - is_false: hits.total - is_false: hits.hits.0._index - - is_false: hits.hits.0._type - is_false: hits.hits.0._id - is_false: hits.hits.1._index - - is_false: hits.hits.1._type - is_false: hits.hits.1._id - do: @@ -65,10 +61,8 @@ - is_true: _shards.total - is_false: hits.total - is_false: hits.hits.0._index - - is_false: hits.hits.0._type - is_false: hits.hits.0._id - is_false: hits.hits.1._index - - is_false: hits.hits.1._type - is_false: hits.hits.1._id - do: @@ -82,10 +76,8 @@ - is_true: _shards.total - is_true: hits.total - is_true: hits.hits.0._index - - is_false: hits.hits.0._type - is_true: hits.hits.0._id - is_true: hits.hits.1._index - - is_false: hits.hits.1._type - is_true: hits.hits.1._id --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml index 9f0273fbc0213..5f5d88dba7687 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml @@ -38,7 +38,6 @@ - match: {hits.total: 3 } - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "172" } - match: {hits.hits.0.sort: [24, 172] } @@ -57,7 +56,6 @@ - match: {hits.total: 3 } - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "42" } - match: {hits.hits.0.sort: [18, 42] } @@ -76,7 +74,6 @@ - match: {hits.total: 3} - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "1" } - match: {hits.hits.0.sort: [18, 1] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml index fe70620c6ef62..ee831e78c74a6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml @@ -85,10 +85,6 @@ setup: --- "Create a snapshot for missing index": - - skip: - version: " - 6.0.0" - reason: ignore_unavailable default is false in 6.0.0 - - do: catch: missing snapshot.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml index 874dda3606c4a..57a4cb55852a5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -96,10 +96,6 @@ setup: --- "Get snapshot info contains include_global_state": - 
- skip: - version: " - 6.1.99" - reason: "include_global_state field has been added in the response in 6.2.0" - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml index b64a51141dc6e..dfed3346726cf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml @@ -281,21 +281,15 @@ setup: - length: { suggest.result.0.options: 2 } - match: { suggest.result.0.options.0.text: "baz" } - match: { suggest.result.0.options.0._index: "test" } - - match: { suggest.result.0.options.0._type: "_doc" } - match: { suggest.result.0.options.0._source.title: "title_baz" } - match: { suggest.result.0.options.0._source.count: 3 } - match: { suggest.result.0.options.1.text: "bar" } - match: { suggest.result.0.options.1._index: "test" } - - match: { suggest.result.0.options.1._type: "_doc" } - match: { suggest.result.0.options.1._source.title: "title_bar" } - match: { suggest.result.0.options.1._source.count: 4 } --- "Skip duplicates should work": - - skip: - version: " - 6.0.99" - reason: skip_duplicates was added in 6.1 - - do: index: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml index e2c7ccfb421e3..df415ef484b1f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml @@ -277,10 +277,6 @@ setup: --- "Skip duplicates with contexts should work": - - skip: - version: " - 6.0.99" - reason: skip_duplicates was added in 6.1 - - do: index: index: test @@ -333,10 +329,6 @@ setup: --- "Indexing and Querying without contexts is forbidden": - - skip: - version: " - 6.99.99" - reason: 
this feature was removed in 7.0 - - do: index: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml index a29019183e199..bcd5fa14c87f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -1,11 +1,6 @@ --- "Search by suggestion and by keyword sub-field should work": - - - skip: - version: " - 6.99.99" - reason: "Search by suggestion with multi-fields was introduced 7.0.0" - - do: indices.create: index: completion_with_sub_keyword @@ -63,11 +58,6 @@ --- "Search by suggestion on sub field should work": - - - skip: - version: " - 6.99.99" - reason: "Search by suggestion with multi-fields was introduced 7.0.0" - - do: indices.create: index: completion_with_sub_completion @@ -113,11 +103,6 @@ --- "Search by suggestion on sub field with context should work": - - - skip: - version: " - 6.99.99" - reason: "Search by suggestion with multi-fields was introduced 7.0.0" - - do: indices.create: index: completion_with_context @@ -182,11 +167,6 @@ --- "Search by suggestion on sub field with weight should work": - - - skip: - version: " - 6.99.99" - reason: "Search by suggestion with multi-fields was introduced 7.0.0" - - do: indices.create: index: completion_with_weight @@ -238,11 +218,6 @@ --- "Search by suggestion on geofield-hash on sub field should work": - - - skip: - version: " - 6.99.99" - reason: "Search by suggestion with multi-fields was introduced 7.0.0" - - do: indices.create: index: geofield_with_completion diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml index 1742134af2b75..d0385ac0125f4 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml @@ -23,10 +23,7 @@ --- "tasks_list headers": - skip: - version: " - 6.99.99" - features: headers - reason: task headers has been added in 7.0.0 - + features: headers - do: headers: { "X-Opaque-Id": "That is me" } tasks.list: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml index 5f43e8a247923..44a78cadc1ada 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml @@ -1,8 +1,3 @@ -setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - --- "Term vector API should return 'found: false' for docs between index and refresh": - do: @@ -39,6 +34,5 @@ setup: realtime: false - match: { _index: "testidx" } - - match: { _type: "_doc" } - match: { _id: "1" } - is_false: found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml index 0cb6dfc06904b..1d357bb587021 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml @@ -1,8 +1,3 @@ -setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - --- "Realtime Term Vectors": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml index 3a35ad46f9161..4cb6710cc161c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml @@ -1,10 +1,5 @@ --- "Partial document": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: index: index: test_1 @@ -25,7 +20,6 @@ one: 3 - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml index 41dba3551e64c..ffcb72027b33d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml @@ -32,7 +32,6 @@ foo: baz - match: { _index: foobar } - - match: { _type: _doc } - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml index 657c036291bd6..ff81bdfd39b26 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml @@ -1,9 +1,5 @@ --- "Update result field": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: update: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml index 08f3457400d4f..a97c68ba6ee3f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml @@ -21,7 +21,6 @@ one: 3 - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - match: { _version: 2 } diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml deleted file mode 100644 index 9adada6d54b4f..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml +++ /dev/null @@ -1,52 +0,0 @@ ---- -"Update result field": - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar } - doc_as_upsert: true - - - match: { _version: 1 } - - match: { result: created } - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar } - doc_as_upsert: true - - - match: { _version: 1 } - - match: { result: noop } - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar } - doc_as_upsert: true - detect_noop: false - - - match: { _version: 2 } - - match: { result: updated } - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: baz } - doc_as_upsert: true - detect_noop: true - - - match: { _version: 3 } - - match: { result: updated } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml index a849eecc66629..4d03971aba252 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -1,9 +1,5 @@ --- "Doc upsert": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: update: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml deleted file mode 100644 index f34e030ff66a0..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml +++ /dev/null @@ -1,41 +0,0 @@ ---- 
-"Doc upsert": - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar, count: 1 } - upsert: { foo: baz } - - - do: - get: - index: test_1 - type: test - id: 1 - - - match: { _source.foo: baz } - - is_false: _source.count - - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar, count: 1 } - upsert: { foo: baz } - - - do: - get: - index: test_1 - type: test - id: 1 - - - match: { _source.foo: bar } - - match: { _source.count: 1 } - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index 5bdc3ecea75fc..c65fc5af27fcc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -1,9 +1,5 @@ --- "Doc as upsert": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: update: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml deleted file mode 100644 index 7585b9f3e0b94..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml +++ /dev/null @@ -1,41 +0,0 @@ ---- -"Doc as upsert": - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: bar, count: 1 } - doc_as_upsert: true - - - do: - get: - index: test_1 - type: test - id: 1 - - - match: { _source.foo: bar } - - match: { _source.count: 1 } - - - - do: - update: - index: test_1 - type: test - id: 1 - body: - doc: { count: 2 } - doc_as_upsert: true - - - do: - get: - index: test_1 - type: test - id: 1 - - - match: { _source.foo: bar } - - match: { _source.count: 2 } - - diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml deleted file mode 100644 index 977db506710c7..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml +++ /dev/null @@ -1,58 +0,0 @@ ---- -"Routing": - - - do: - indices.create: - index: test_1 - body: - settings: - index: - number_of_shards: 5 - number_of_routing_shards: 5 - number_of_replicas: 0 - - - do: - cluster.health: - wait_for_status: green - - - do: - update: - index: test_1 - type: test - id: 1 - routing: 5 - body: - doc: { foo: baz } - upsert: { foo: bar } - - - do: - get: - index: test_1 - type: test - id: 1 - routing: 5 - stored_fields: _routing - - - match: { _routing: "5"} - - - do: - catch: missing - update: - index: test_1 - type: test - id: 1 - body: - doc: { foo: baz } - - - do: - update: - index: test_1 - type: test - id: 1 - routing: 5 - _source: foo - body: - doc: { foo: baz } - - - match: { get._source.foo: baz } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml index 9e6d5a4671955..e196e03143456 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -1,9 +1,5 @@ --- "Source filtering": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - do: update: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml deleted file mode 100644 index 4bb22e6b8012e..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml +++ 
/dev/null @@ -1,19 +0,0 @@ ---- -"Source filtering": - - - do: - update: - index: test_1 - type: test - id: 1 - _source: [foo, bar] - body: - doc: { foo: baz } - upsert: { foo: bar } - - - match: { get._source.foo: bar } - - is_false: get._source.bar - -# TODO: -# -# - Add _routing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml deleted file mode 100644 index f7791d0986399..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -"Metadata Fields": - - - skip: - version: "all" - reason: "Update doesn't return metadata fields, waiting for #3259" - - - do: - indices.create: - index: test_1 - - - do: - update: - index: test_1 - type: test - id: 1 - parent: 5 - fields: [ _routing ] - body: - doc: { foo: baz } - upsert: { foo: bar } - - - match: { get._routing: "5" } - - - do: - get: - index: test_1 - type: test - id: 1 - parent: 5 - stored_fields: [ _routing ] - - diff --git a/server/build.gradle b/server/build.gradle index 44b88754312ac..3a11428ca7919 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -64,14 +64,14 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } tasks.named('forbiddenApisJava11').configure { doFirst { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } } } @@ -227,7 +227,6 @@ tasks.named("thirdPartyAudit").configure { 'com.fasterxml.jackson.databind.ObjectMapper', // from log4j - 'com.conversantmedia.util.concurrent.DisruptorBlockingQueue', 'com.conversantmedia.util.concurrent.SpinPolicy', 'com.fasterxml.jackson.annotation.JsonInclude$Include', 
'com.fasterxml.jackson.databind.DeserializationContext', @@ -251,8 +250,6 @@ tasks.named("thirdPartyAudit").configure { 'com.fasterxml.jackson.databind.node.ObjectNode', 'org.fusesource.jansi.Ansi', 'org.fusesource.jansi.AnsiRenderer$Code', - 'com.lmax.disruptor.BlockingWaitStrategy', - 'com.lmax.disruptor.BusySpinWaitStrategy', 'com.lmax.disruptor.EventFactory', 'com.lmax.disruptor.EventTranslator', 'com.lmax.disruptor.EventTranslatorTwoArg', @@ -262,10 +259,7 @@ tasks.named("thirdPartyAudit").configure { 'com.lmax.disruptor.RingBuffer', 'com.lmax.disruptor.Sequence', 'com.lmax.disruptor.SequenceReportingEventHandler', - 'com.lmax.disruptor.SleepingWaitStrategy', - 'com.lmax.disruptor.TimeoutBlockingWaitStrategy', 'com.lmax.disruptor.WaitStrategy', - 'com.lmax.disruptor.YieldingWaitStrategy', 'com.lmax.disruptor.dsl.Disruptor', 'com.lmax.disruptor.dsl.ProducerType', 'javax.jms.Connection', @@ -284,23 +278,17 @@ tasks.named("thirdPartyAudit").configure { 'javax.mail.Transport', 'javax.mail.internet.InternetAddress', 'javax.mail.internet.InternetHeaders', - 'javax.mail.internet.MimeBodyPart', 'javax.mail.internet.MimeMessage', 'javax.mail.internet.MimeMultipart', 'javax.mail.internet.MimeUtility', - 'javax.mail.util.ByteArrayDataSource', 'org.apache.commons.compress.compressors.CompressorStreamFactory', 'org.apache.commons.compress.utils.IOUtils', 'org.apache.commons.csv.CSVFormat', 'org.apache.commons.csv.QuoteMode', - 'org.apache.kafka.clients.producer.KafkaProducer', 'org.apache.kafka.clients.producer.Producer', - 'org.apache.kafka.clients.producer.ProducerRecord', 'org.apache.kafka.clients.producer.RecordMetadata', 'org.codehaus.stax2.XMLStreamWriter2', 'org.jctools.queues.MpscArrayQueue', - 'org.osgi.framework.AdaptPermission', - 'org.osgi.framework.AdminPermission', 'org.osgi.framework.Bundle', 'org.osgi.framework.BundleActivator', 'org.osgi.framework.BundleContext', diff --git a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java 
b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java index 666c0a87a7acb..17366cf0d08fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/IndicesRequestIT.java @@ -234,11 +234,7 @@ public void testIndex() { String[] indexShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; interceptTransportActions(indexShardActions); - IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias(), "type", "id").source( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias()).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); internalCluster().coordOnlyNodeClient().index(indexRequest).actionGet(); clearInterceptedActions(); @@ -249,7 +245,7 @@ public void testDelete() { String[] deleteShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; interceptTransportActions(deleteShardActions); - DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias(), "type", "id"); + DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias()).id("id"); internalCluster().coordOnlyNodeClient().delete(deleteRequest).actionGet(); clearInterceptedActions(); @@ -262,8 +258,8 @@ public void testUpdate() { interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); - client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); - UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); + client().prepareIndex(indexOrAlias).setId("id").setSource("field", "value").get(); + UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); UpdateResponse updateResponse = 
internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); @@ -277,7 +273,7 @@ public void testUpdateUpsert() { interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); - UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").upsert(Requests.INDEX_CONTENT_TYPE, "field", "value") + UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").upsert(Requests.INDEX_CONTENT_TYPE, "field", "value") .doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); @@ -292,8 +288,8 @@ public void testUpdateDelete() { interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); - client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); - UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").script( + client().prepareIndex(indexOrAlias).setId("id").setSource("field", "value").get(); + UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "id").script( new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op='delete'", Collections.emptyMap()) ); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); @@ -312,19 +308,19 @@ public void testBulk() { int numIndexRequests = iterations(1, 10); for (int i = 0; i < numIndexRequests; i++) { String indexOrAlias = randomIndexOrAlias(); - bulkRequest.add(new IndexRequest(indexOrAlias, "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); + bulkRequest.add(new IndexRequest(indexOrAlias).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); indices.add(indexOrAlias); } int numDeleteRequests = iterations(1, 10); for (int i = 0; i < numDeleteRequests; 
i++) { String indexOrAlias = randomIndexOrAlias(); - bulkRequest.add(new DeleteRequest(indexOrAlias, "type", "id")); + bulkRequest.add(new DeleteRequest(indexOrAlias).id("id")); indices.add(indexOrAlias); } int numUpdateRequests = iterations(1, 10); for (int i = 0; i < numUpdateRequests; i++) { String indexOrAlias = randomIndexOrAlias(); - bulkRequest.add(new UpdateRequest(indexOrAlias, "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1")); + bulkRequest.add(new UpdateRequest(indexOrAlias, "id").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value1")); indices.add(indexOrAlias); } @@ -338,7 +334,7 @@ public void testGet() { String getShardAction = GetAction.NAME + "[s]"; interceptTransportActions(getShardAction); - GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "type", "id"); + GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "id"); internalCluster().coordOnlyNodeClient().get(getRequest).actionGet(); clearInterceptedActions(); @@ -349,7 +345,7 @@ public void testExplain() { String explainShardAction = ExplainAction.NAME + "[s]"; interceptTransportActions(explainShardAction); - ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").query(QueryBuilders.matchAllQuery()); + ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "id").query(QueryBuilders.matchAllQuery()); internalCluster().coordOnlyNodeClient().explain(explainRequest).actionGet(); clearInterceptedActions(); @@ -360,7 +356,7 @@ public void testTermVector() { String termVectorShardAction = TermVectorsAction.NAME + "[s]"; interceptTransportActions(termVectorShardAction); - TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "type", "id"); + TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "id"); internalCluster().coordOnlyNodeClient().termVectors(termVectorsRequest).actionGet(); clearInterceptedActions(); @@ -376,7 +372,7 @@ public void 
testMultiTermVector() { int numDocs = iterations(1, 30); for (int i = 0; i < numDocs; i++) { String indexOrAlias = randomIndexOrAlias(); - multiTermVectorsRequest.add(indexOrAlias, "type", Integer.toString(i)); + multiTermVectorsRequest.add(indexOrAlias, Integer.toString(i)); indices.add(indexOrAlias); } internalCluster().coordOnlyNodeClient().multiTermVectors(multiTermVectorsRequest).actionGet(); @@ -394,7 +390,7 @@ public void testMultiGet() { int numDocs = iterations(1, 30); for (int i = 0; i < numDocs; i++) { String indexOrAlias = randomIndexOrAlias(); - multiGetRequest.add(indexOrAlias, "type", Integer.toString(i)); + multiGetRequest.add(indexOrAlias, Integer.toString(i)); indices.add(indexOrAlias); } internalCluster().coordOnlyNodeClient().multiGet(multiGetRequest).actionGet(); @@ -549,8 +545,7 @@ public void testGetMappings() { public void testPutMapping() { interceptTransportActions(PutMappingAction.NAME); - PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).type("type") - .source("field", "type=text"); + PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).source("field", "type=text"); internalCluster().coordOnlyNodeClient().admin().indices().putMapping(putMappingRequest).actionGet(); clearInterceptedActions(); @@ -588,7 +583,7 @@ public void testSearchQueryThenFetch() throws Exception { String[] randomIndicesOrAliases = randomIndicesOrAliases(); for (int i = 0; i < randomIndicesOrAliases.length; i++) { - client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get(); + client().prepareIndex(randomIndicesOrAliases[i]).setId("id-" + i).setSource("field", "value").get(); } refresh(); @@ -613,7 +608,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { String[] randomIndicesOrAliases = randomIndicesOrAliases(); for (int i = 0; i < randomIndicesOrAliases.length; i++) { - client().prepareIndex(randomIndicesOrAliases[i], "type", 
"id-" + i).setSource("field", "value").get(); + client().prepareIndex(randomIndicesOrAliases[i]).setId("id-" + i).setSource("field", "value").get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java index a0ddf68355a63..1512fa4934ca1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/ListenerActionIT.java @@ -48,7 +48,7 @@ public void testThreadedListeners() throws Throwable { final AtomicReference threadName = new AtomicReference<>(); Client client = client(); - IndexRequest request = new IndexRequest("test", "type", "1"); + IndexRequest request = new IndexRequest("test").id("1"); if (randomBoolean()) { // set the source, without it, we will have a verification failure request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java b/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java index e94167fb71552..f930b9e9cfda0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/RejectionActionIT.java @@ -69,7 +69,7 @@ protected Settings nodeSettings(int nodeOrdinal) { public void testSimulatedSearchRejectionLoad() throws Throwable { for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "1").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "1").get(); } int numberOfAsyncOps = randomIntBetween(200, 700); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java index 24d389d8ea03e..ab44c95b4f5a6 
100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/HotThreadsIT.java @@ -128,9 +128,9 @@ public void onFailure(Exception e) { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3") + client().prepareIndex("test").setId("1").setSource("field1", "value1"), + client().prepareIndex("test").setId("2").setSource("field1", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3") ); ensureSearchable(); while (latch.getCount() > 0) { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java index 688ed3b2819b0..ac0ae44eb732e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java @@ -318,10 +318,8 @@ public void testTransportBulkTasks() { createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks // ensures the mapping is available on all nodes so we won't retry the request (in case replicas don't have the right mapping). 
- client().admin().indices().preparePutMapping("test").setType("doc").setSource("foo", "type=keyword").get(); - client().prepareBulk() - .add(client().prepareIndex("test", "doc", "test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)) - .get(); + client().admin().indices().preparePutMapping("test").setSource("foo", "type=keyword").get(); + client().prepareBulk().add(client().prepareIndex("test").setId("test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)).get(); // the bulk operation should produce one main task List topTask = findEvents(BulkAction.NAME, Tuple::v1); @@ -370,7 +368,8 @@ public void testSearchTaskDescriptions() { registerTaskManagerListeners(SearchAction.NAME + "[*]"); // shard task createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks - client().prepareIndex("test", "doc", "test_id") + client().prepareIndex("test") + .setId("test_id") .setSource("{\"foo\": \"bar\"}", XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -379,14 +378,12 @@ public void testSearchTaskDescriptions() { headers.put(Task.X_OPAQUE_ID, "my_id"); headers.put("Foo-Header", "bar"); headers.put("Custom-Task-Header", "my_value"); - assertSearchResponse( - client().filterWithHeader(headers).prepareSearch("test").setTypes("doc").setQuery(QueryBuilders.matchAllQuery()).get() - ); + assertSearchResponse(client().filterWithHeader(headers).prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).get()); // the search operation should produce one main task List mainTask = findEvents(SearchAction.NAME, Tuple::v1); assertEquals(1, mainTask.size()); - assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], types[doc], search_type[")); + assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], search_type[")); assertThat(mainTask.get(0).getDescription(), containsString("\"query\":{\"match_all\"")); assertTaskHeaders(mainTask.get(0)); @@ -477,7 +474,7 @@ 
public void waitForTaskCompletion(Task task) {} } // Need to run the task in a separate thread because node client's .execute() is blocked by our task listener index = new Thread(() -> { - IndexResponse indexResponse = client().prepareIndex("test", "test").setSource("test", "test").get(); + IndexResponse indexResponse = client().prepareIndex("test").setSource("test", "test").get(); assertArrayEquals(ReplicationResponse.EMPTY, indexResponse.getShardInfo().getFailures()); }); index.start(); @@ -829,14 +826,12 @@ public void testTaskStoringSuccessfulResult() throws Exception { assertNoFailures(client().admin().indices().prepareRefresh(TaskResultsService.TASK_INDEX).get()); SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX) - .setTypes(TaskResultsService.TASK_TYPE) .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.getAction()))) .get(); assertEquals(1L, searchResponse.getHits().getTotalHits().value); searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX) - .setTypes(TaskResultsService.TASK_TYPE) .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.getTaskId().getNodeId()))) .get(); @@ -845,6 +840,11 @@ public void testTaskStoringSuccessfulResult() throws Exception { GetTaskResponse getResponse = expectFinishedTask(taskId); assertEquals(result, getResponse.getTask().getResponseAsMap()); assertNull(getResponse.getTask().getError()); + + // run it again to check that the tasks index has been successfully created and can be re-used + client().execute(TestTaskPlugin.TestTaskAction.INSTANCE, request).get(); + events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1); + assertEquals(2, events.size()); } public void testTaskStoringFailureResult() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java 
b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java index cc855696d5f8e..1731c607a066d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/snapshots/SnapshotBlocksIT.java @@ -73,11 +73,11 @@ protected void setUpRepository() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(INDEX_NAME, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(INDEX_NAME).setSource("test", "init").execute().actionGet(); } docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(OTHER_INDEX_NAME, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(OTHER_INDEX_NAME).setSource("test", "init").execute().actionGet(); } logger.info("--> register a repository"); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java index 19d1728a1fecd..72f34133067ee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -42,8 +42,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.monitor.os.OsStats; import org.opensearch.node.NodeRoleSettings; import org.opensearch.test.OpenSearchIntegTestCase; @@ -276,19 +274,13 @@ public void testFieldTypes() { assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN)); 
assertTrue(response.getIndicesStats().getMappings().getFieldTypeStats().isEmpty()); - client().admin() - .indices() - .prepareCreate("test1") - .addMapping(MapperService.SINGLE_MAPPING_NAME, "{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}", XContentType.JSON) - .get(); + client().admin().indices().prepareCreate("test1").setMapping("{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}").get(); client().admin() .indices() .prepareCreate("test2") - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( "{\"properties\":{\"foo\":{\"type\": \"keyword\"},\"bar\":{\"properties\":{\"baz\":{\"type\":\"keyword\"}," - + "\"eggplant\":{\"type\":\"integer\"}}}}}", - XContentType.JSON + + "\"eggplant\":{\"type\":\"integer\"}}}}}" ) .get(); response = client().admin().cluster().prepareClusterStats().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java index 11a3c6708ec93..98fc6483703c4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CloneIndexIT.java @@ -62,7 +62,7 @@ public void testCreateCloneIndex() { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } internalCluster().ensureAtLeastNumDataNodes(2); // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node @@ -122,7 +122,7 @@ public void testCreateCloneIndex() { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + 
"}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertHitCount( diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java index cee5fd15d54c2..476bd72ee3ca3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java @@ -52,19 +52,18 @@ import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.IndexService; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.RangeQueryBuilder; import org.opensearch.indices.IndicesService; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import java.util.HashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertRequestBuilderThrows; @@ -109,28 +108,6 @@ public void testCreationDateGenerated() { assertThat(index.getCreationDate(), allOf(lessThanOrEqualTo(timeAfterRequest), greaterThanOrEqualTo(timeBeforeRequest))); } - public void testDoubleAddMapping() throws Exception { - try { - 
prepareCreate("test").addMapping("type1", "date", "type=date").addMapping("type1", "num", "type=integer"); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - try { - prepareCreate("test").addMapping("type1", new HashMap()).addMapping("type1", new HashMap()); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - try { - prepareCreate("test").addMapping("type1", jsonBuilder().startObject().endObject()) - .addMapping("type1", jsonBuilder().startObject().endObject()); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - } - public void testNonNestedMappings() throws Exception { assertAcked( prepareCreate("test").addMapping( @@ -148,12 +125,9 @@ public void testNonNestedMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertFalse(metadata.sourceAsMap().isEmpty()); + assertFalse(mappings.sourceAsMap().isEmpty()); } public void testEmptyNestedMappings() throws Exception { @@ -161,23 +135,26 @@ public void testEmptyNestedMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertTrue(metadata.sourceAsMap().isEmpty()); + assertTrue(mappings.sourceAsMap().isEmpty()); } public void testMappingParamAndNestedMismatch() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, () -> 
prepareCreate("test").addMapping( - "type1", + MapperService.SINGLE_MAPPING_NAME, XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject() ).get() ); - assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: Root mapping definition has unsupported parameters")); + assertThat( + e.getMessage(), + startsWith( + "Failed to parse mapping [" + MapperService.SINGLE_MAPPING_NAME + "]: Root mapping definition has unsupported parameters" + ) + ); } public void testEmptyMappings() throws Exception { @@ -190,12 +167,9 @@ public void testEmptyMappings() throws Exception { GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap mappings = response.mappings().get("test"); + MappingMetadata mappings = response.mappings().get("test"); assertNotNull(mappings); - - MappingMetadata metadata = mappings.get("_doc"); - assertNotNull(metadata); - assertTrue(metadata.sourceAsMap().isEmpty()); + assertTrue(mappings.sourceAsMap().isEmpty()); } public void testInvalidShardCountSettings() throws Exception { @@ -276,7 +250,7 @@ public void testCreateAndDeleteIndexConcurrently() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "test").setSource("index_version", indexVersion.get()).get(); + client().prepareIndex("test").setSource("index_version", indexVersion.get()).get(); } synchronized (indexVersionLock) { // not necessarily needed here but for completeness we lock here too indexVersion.incrementAndGet(); @@ -289,7 +263,7 @@ public void onResponse(AcknowledgedResponse deleteIndexResponse) { public void run() { try { // recreate that index - client().prepareIndex("test", "test").setSource("index_version", indexVersion.get()).get(); + client().prepareIndex("test").setSource("index_version", indexVersion.get()).get(); synchronized (indexVersionLock) { // we 
sync here since we have to ensure that all indexing operations below for a given ID are done before // we increment the index version otherwise a doc that is in-flight could make it into an index that it @@ -315,7 +289,7 @@ public void onFailure(Exception e) { for (int i = 0; i < numDocs; i++) { try { synchronized (indexVersionLock) { - client().prepareIndex("test", "test") + client().prepareIndex("test") .setSource("index_version", indexVersion.get()) .setTimeout(TimeValue.timeValueSeconds(10)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java index a53c28d170a93..ef5c56c50ed83 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java @@ -71,6 +71,7 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -107,7 +108,8 @@ public void testCreateShrinkIndexToN() { internalCluster().ensureAtLeastNumDataNodes(2); prepareCreate("source").setSettings(Settings.builder().put(indexSettings()).put("number_of_shards", shardSplits[0])).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "t1", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -150,7 +152,8 @@ public void testCreateShrinkIndexToN() { assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20); for (int i = 0; i < 20; i++) { 
// now update - client().prepareIndex("first_shrink", "t1", Integer.toString(i)) + client().prepareIndex("first_shrink") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -192,7 +195,8 @@ public void testCreateShrinkIndexToN() { assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20); for (int i = 0; i < 20; i++) { // now update - client().prepareIndex("second_shrink", "t1", Integer.toString(i)) + client().prepareIndex("second_shrink") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON) .get(); } @@ -240,10 +244,8 @@ public void testShrinkIndexPrimaryTerm() throws Exception { final String s = Integer.toString(id); final int hash = Math.floorMod(Murmur3HashFunction.hash(s), numberOfShards); if (hash == shardId) { - final IndexRequest request = new IndexRequest("source", "type", s).source( - "{ \"f\": \"" + s + "\"}", - XContentType.JSON - ); + final IndexRequest request = new IndexRequest("source").id(s) + .source("{ \"f\": \"" + s + "\"}", XContentType.JSON); client().index(request).get(); break; } else { @@ -294,7 +296,7 @@ public void testCreateShrinkIndex() { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } ImmutableOpenMap dataNodes = client().admin() .cluster() @@ -395,7 +397,7 @@ public void testCreateShrinkIndex() { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); 
assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 2 * docs); @@ -422,7 +424,7 @@ public void testCreateShrinkIndexFails() throws Exception { Settings.builder().put(indexSettings()).put("number_of_shards", randomIntBetween(2, 7)).put("number_of_replicas", 0) ).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } ImmutableOpenMap dataNodes = client().admin() .cluster() @@ -525,9 +527,10 @@ public void testCreateShrinkWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 8) .put("number_of_replicas", 0) - ).addMapping("type", "id", "type=keyword,doc_values=true").get(); + ).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"id\" : " + i + "}", XContentType.JSON) .get(); } @@ -597,7 +600,7 @@ public void testCreateShrinkWithIndexSort() throws Exception { // ... 
and that the index sort is also applied to updates for (int i = 20; i < 40; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertSortedSegments("target", expectedIndexSort); @@ -608,7 +611,7 @@ public void testShrinkCommitsMergeOnIdle() throws Exception { Settings.builder().put(indexSettings()).put("index.number_of_replicas", 0).put("number_of_shards", 5) ).get(); for (int i = 0; i < 30; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } client().admin().indices().prepareFlush("source").get(); ImmutableOpenMap dataNodes = client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java index 86974322388ab..42b1d5f4a757f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java @@ -65,6 +65,7 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -135,12 +136,12 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha int numRoutingShards = MetadataCreateIndexService.calculateNumRoutingShards(secondSplitShards, Version.CURRENT) - 1; 
settings.put("index.routing_partition_size", randomIntBetween(1, numRoutingShards)); if (useNested) { - createInitialIndex.addMapping("t1", "_routing", "required=true", "nested1", "type=nested"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true", "nested1", "type=nested"); } else { - createInitialIndex.addMapping("t1", "_routing", "required=true"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true"); } } else if (useNested) { - createInitialIndex.addMapping("t1", "nested1", "type=nested"); + createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "nested1", "type=nested"); } logger.info("use routing {} use mixed routing {} use nested {}", useRouting, useMixedRouting, useNested); createInitialIndex.setSettings(settings).get(); @@ -150,7 +151,8 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha BiFunction indexFunc = (index, id) -> { try { - return client().prepareIndex(index, "t1", Integer.toString(id)) + return client().prepareIndex(index) + .setId(Integer.toString(id)) .setSource( jsonBuilder().startObject() .field("foo", "bar") @@ -229,7 +231,7 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); for (int i = 0; i < numDocs; i++) { - GetResponse getResponse = client().prepareGet("first_split", "t1", Integer.toString(i)).setRouting(routingValue[i]).get(); + GetResponse getResponse = client().prepareGet("first_split", Integer.toString(i)).setRouting(routingValue[i]).get(); assertTrue(getResponse.isExists()); } @@ -274,7 +276,7 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha } flushAndRefresh(); for (int i = 0; i < numDocs; 
i++) { - GetResponse getResponse = client().prepareGet("second_split", "t1", Integer.toString(i)).setRouting(routingValue[i]).get(); + GetResponse getResponse = client().prepareGet("second_split", Integer.toString(i)).setRouting(routingValue[i]).get(); assertTrue(getResponse.isExists()); } assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); @@ -345,10 +347,8 @@ public void testSplitIndexPrimaryTerm() throws Exception { final String s = Integer.toString(id); final int hash = Math.floorMod(Murmur3HashFunction.hash(s), numberOfShards); if (hash == shardId) { - final IndexRequest request = new IndexRequest("source", "type", s).source( - "{ \"f\": \"" + s + "\"}", - XContentType.JSON - ); + final IndexRequest request = new IndexRequest("source").id(s) + .source("{ \"f\": \"" + s + "\"}", XContentType.JSON); client().index(request).get(); break; } else { @@ -404,7 +404,7 @@ public void testCreateSplitIndex() throws Exception { ).get(); final int docs = randomIntBetween(0, 128); for (int i = 0; i < docs; i++) { - client().prepareIndex("source", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due @@ -488,7 +488,7 @@ public void testCreateSplitIndex() throws Exception { } for (int i = docs; i < 2 * docs; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertHitCount( @@ -523,9 +523,10 @@ public void 
testCreateSplitWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 2) .put("number_of_replicas", 0) - ).addMapping("type", "id", "type=keyword,doc_values=true").get(); + ).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("source", "type", Integer.toString(i)) + client().prepareIndex("source") + .setId(Integer.toString(i)) .setSource("{\"foo\" : \"bar\", \"id\" : " + i + "}", XContentType.JSON) .get(); } @@ -582,7 +583,7 @@ public void testCreateSplitWithIndexSort() throws Exception { // ... and that the index sort is also applied to updates for (int i = 20; i < 40; i++) { - client().prepareIndex("target", "type").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); + client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); assertSortedSegments("target", expectedIndexSort); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java index 0dd4ff1ba863c..1ab5826329c8f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/delete/DeleteIndexBlocksIT.java @@ -58,14 +58,14 @@ public void testDeleteIndexWithBlocks() { public void testDeleteIndexOnIndexReadOnlyAllowDeleteSetting() { createIndex("test"); ensureGreen("test"); - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get(); + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get(); refresh(); try { Settings settings = Settings.builder().put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, true).build(); 
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get()); assertSearchHits(client().prepareSearch().get(), "1"); assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); assertBlocked( @@ -95,7 +95,7 @@ public void testClusterBlockMessageHasIndexName() { client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); ClusterBlockException e = expectThrows( ClusterBlockException.class, - () -> client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get() + () -> client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get() ); assertEquals( "index [test] blocked by: [TOO_MANY_REQUESTS/12/disk usage exceeded flood-stage watermark, " @@ -116,14 +116,14 @@ public void testClusterBlockMessageHasIndexName() { public void testDeleteIndexOnClusterReadOnlyAllowDeleteSetting() { createIndex("test"); ensureGreen("test"); - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar").get(); + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar").get(); refresh(); try { Settings settings = Settings.builder().put(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); assertSearchHits(client().prepareSearch().get(), "1"); assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), Metadata.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK ); assertBlocked( diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java 
b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java index 07fc8b9cac124..f780f505a6557 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/flush/FlushBlocksIT.java @@ -55,7 +55,7 @@ public void testFlushWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } // Request is not blocked diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java index 3a5de998c9f7b..b279623c46969 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java @@ -57,7 +57,7 @@ public void testForceMergeWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } // Request is not blocked diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java index a31976c969aaa..195817bf04cc9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ 
-32,11 +32,13 @@ package org.opensearch.action.admin.indices.forcemerge; +import org.apache.lucene.index.IndexCommit; import org.opensearch.action.admin.indices.flush.FlushResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.IndexRoutingTable; import org.opensearch.cluster.routing.IndexShardRoutingTable; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.index.Index; import org.opensearch.index.engine.Engine; @@ -99,8 +101,8 @@ public void testForceMergeUUIDConsistent() throws IOException { } private static String getForceMergeUUID(IndexShard indexShard) throws IOException { - try (Engine.IndexCommitRef indexCommitRef = indexShard.acquireLastIndexCommit(true)) { - return indexCommitRef.getIndexCommit().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); + try (GatedCloseable wrappedIndexCommit = indexShard.acquireLastIndexCommit(true)) { + return wrappedIndexCommit.get().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java index ad6c9ecfb5663..ffc738ac98de5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java @@ -35,11 +35,11 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.get.GetIndexRequest.Feature; +import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import 
org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.test.OpenSearchIntegTestCase; @@ -63,12 +63,7 @@ public class GetIndexIT extends OpenSearchIntegTestCase { @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked( - prepareCreate("idx").addAlias(new Alias("alias_idx")) - .addMapping("type1", "{\"type1\":{}}", XContentType.JSON) - .setSettings(Settings.builder().put("number_of_shards", 1)) - .get() - ); + assertAcked(prepareCreate("idx").addAlias(new Alias("alias_idx")).setSettings(Settings.builder().put("number_of_shards", 1)).get()); ensureSearchable("idx"); createIndex("empty_idx"); ensureSearchable("idx", "empty_idx"); @@ -94,6 +89,19 @@ public void testSimpleUnknownIndex() { } } + public void testUnknownIndexWithAllowNoIndices() { + GetIndexResponse response = client().admin() + .indices() + .prepareGetIndex() + .addIndices("missing_idx") + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) + .get(); + assertThat(response.indices(), notNullValue()); + assertThat(response.indices().length, equalTo(0)); + assertThat(response.mappings(), notNullValue()); + assertThat(response.mappings().size(), equalTo(0)); + } + public void testEmpty() { GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("empty_idx").get(); String[] indices = response.indices(); @@ -263,24 +271,19 @@ private void assertNonEmptySettings(GetIndexResponse response, String indexName) } private void assertMappings(GetIndexResponse response, String indexName) { - ImmutableOpenMap> mappings = response.mappings(); + ImmutableOpenMap mappings = response.mappings(); assertThat(mappings, notNullValue()); assertThat(mappings.size(), equalTo(1)); - ImmutableOpenMap indexMappings = mappings.get(indexName); + MappingMetadata indexMappings = mappings.get(indexName); assertThat(indexMappings, notNullValue()); - 
assertThat(indexMappings.size(), equalTo(1)); - MappingMetadata mapping = indexMappings.get("type1"); - assertThat(mapping, notNullValue()); - assertThat(mapping.type(), equalTo("type1")); } private void assertEmptyOrOnlyDefaultMappings(GetIndexResponse response, String indexName) { - ImmutableOpenMap> mappings = response.mappings(); + ImmutableOpenMap mappings = response.mappings(); assertThat(mappings, notNullValue()); assertThat(mappings.size(), equalTo(1)); - ImmutableOpenMap indexMappings = mappings.get(indexName); - assertThat(indexMappings, notNullValue()); - assertThat(indexMappings.size(), equalTo(0)); + MappingMetadata indexMappings = mappings.get(indexName); + assertEquals(indexMappings, MappingMetadata.EMPTY_MAPPINGS); } private void assertAliases(GetIndexResponse response, String indexName) { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java index 3f67495fd746c..fe1bc05dc5f20 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/mapping/put/ValidateMappingRequestPluginIT.java @@ -80,40 +80,38 @@ public void testValidateMappingRequest() { allowedOrigins.put("index_2", Arrays.asList("2", "3")); { String origin = randomFrom("", "3", "4", "5"); - PutMappingRequest request = new PutMappingRequest().indices("index_1").type("doc").source("t1", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("index_1").source("t1", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), equalTo("not allowed: index[index_1] origin[" + origin + "]")); } { 
PutMappingRequest request = new PutMappingRequest().indices("index_1") .origin(randomFrom("1", "2")) - .type("doc") .source("t1", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } { String origin = randomFrom("", "1", "4", "5"); - PutMappingRequest request = new PutMappingRequest().indices("index_2").type("doc").source("t2", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("index_2").source("t2", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), equalTo("not allowed: index[index_2] origin[" + origin + "]")); } { PutMappingRequest request = new PutMappingRequest().indices("index_2") .origin(randomFrom("2", "3")) - .type("doc") .source("t1", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } { String origin = randomFrom("", "1", "3", "4"); - PutMappingRequest request = new PutMappingRequest().indices("*").type("doc").source("t3", "type=keyword").origin(origin); + PutMappingRequest request = new PutMappingRequest().indices("*").source("t3", "type=keyword").origin(origin); Exception e = expectThrows(IllegalStateException.class, () -> client().admin().indices().putMapping(request).actionGet()); assertThat(e.getMessage(), containsString("not allowed:")); } { - PutMappingRequest request = new PutMappingRequest().indices("index_2").origin("2").type("doc").source("t3", "type=keyword"); + PutMappingRequest request = new PutMappingRequest().indices("index_2").origin("2").source("t3", "type=keyword"); assertAcked(client().admin().indices().putMapping(request).actionGet()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java index 
992a4fcb8eab7..df885848d82ce 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsBlocksIT.java @@ -53,7 +53,7 @@ public void testIndicesSegmentsWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test-blocks", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test-blocks").setId("" + i).setSource("test", "init").execute().actionGet(); } client().admin().indices().prepareFlush("test-blocks").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 714b366fdda8c..ea9f7e0a7232d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -236,7 +236,7 @@ private void indexRandomData(String index) throws ExecutionException, Interrupte int numDocs = scaledRandomIntBetween(10, 20); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(index, "type").setSource("field", "value"); + builders[i] = client().prepareIndex(index).setSource("field", "value"); } indexRandom(true, builders); client().admin().indices().prepareFlush().setForce(true).execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java index e33b140d288ac..e2a1363f163da 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkIntegrationIT.java @@ -88,7 +88,6 @@ public void testBulkIndexCreatesMapping() throws Exception { assertBusy(() -> { GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings().get(); assertTrue(mappingsResponse.getMappings().containsKey("logstash-2014.03.30")); - assertTrue(mappingsResponse.getMappings().get("logstash-2014.03.30").containsKey("logs")); }); } @@ -117,7 +116,7 @@ public void testBulkWithWriteIndexAndRouting() { .setSettings(twoShardsSettings) .get(); - IndexRequest indexRequestWithAlias = new IndexRequest("alias1", "type", "id"); + IndexRequest indexRequestWithAlias = new IndexRequest("alias1").id("id"); if (randomBoolean()) { indexRequestWithAlias.routing("1"); } @@ -127,19 +126,19 @@ public void testBulkWithWriteIndexAndRouting() { assertThat(bulkResponse.getItems()[0].getResponse().getShardId().getId(), equalTo(0)); assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED)); - assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); + assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); - bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "type", "id").setDoc("foo", "updated")).get(); + bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "id").setDoc("foo", "updated")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); - assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); - bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "type", "id")).get(); + 
assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); + bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "id")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); - assertFalse(client().prepareGet("index3", "type", "id").setRouting("1").get().isExists()); + assertFalse(client().prepareGet("index3", "id").setRouting("1").get().isExists()); } // allowing the auto-generated timestamp to externally be set would allow making the index inconsistent with duplicate docs public void testExternallySetAutoGeneratedTimestamp() { - IndexRequest indexRequest = new IndexRequest("index1", "_doc").source(Collections.singletonMap("foo", "baz")); + IndexRequest indexRequest = new IndexRequest("index1").source(Collections.singletonMap("foo", "baz")); indexRequest.process(Version.CURRENT, null, null); // sets the timestamp if (randomBoolean()) { indexRequest.id("test"); @@ -163,7 +162,7 @@ public void testBulkWithGlobalDefaults() throws Exception { { createSamplePipeline("pipeline"); - BulkRequestBuilder bulkBuilder = client().prepareBulk("test", "type1").routing("routing").pipeline("pipeline"); + BulkRequestBuilder bulkBuilder = client().prepareBulk("test").routing("routing").pipeline("pipeline"); bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); BulkResponse bulkItemResponses = bulkBuilder.get(); @@ -201,7 +200,7 @@ public void testDeleteIndexWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index, "_doc") + IndexResponse response = client().prepareIndex(index) .setId(id) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java index 7532e5dc1067c..14531787e9903 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorClusterSettingsIT.java @@ -50,9 +50,9 @@ public void testBulkProcessorAutoCreateRestrictions() throws Exception { client().admin().cluster().prepareHealth("willwork").setWaitForGreenStatus().execute().actionGet(); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); - bulkRequestBuilder.add(client().prepareIndex("willwork", "type1", "1").setSource("{\"foo\":1}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("wontwork", "type1", "2").setSource("{\"foo\":2}", XContentType.JSON)); - bulkRequestBuilder.add(client().prepareIndex("willwork", "type1", "3").setSource("{\"foo\":3}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("willwork").setId("1").setSource("{\"foo\":1}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("wontwork").setId("2").setSource("{\"foo\":2}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("willwork").setId("3").setSource("{\"foo\":3}", XContentType.JSON)); BulkResponse br = bulkRequestBuilder.get(); BulkItemResponse[] responses = br.getItems(); assertEquals(3, responses.length); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java index 20791f46ade59..850034bc631b1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java @@ -40,14 +40,10 @@ import 
org.opensearch.client.Client; import org.opensearch.client.Requests; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.test.OpenSearchIntegTestCase; import java.util.Arrays; @@ -173,7 +169,6 @@ public void testBulkProcessorConcurrentRequests() throws Exception { for (BulkItemResponse bulkItemResponse : listener.bulkItems) { assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false)); assertThat(bulkItemResponse.getIndex(), equalTo("test")); - assertThat(bulkItemResponse.getType(), equalTo("test")); // with concurrent requests > 1 we can't rely on the order of the bulk requests assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(numDocs))); // we do want to check that we don't get duplicate ids back @@ -253,17 +248,14 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception if (randomBoolean()) { testDocs++; processor.add( - new IndexRequest("test", "test", Integer.toString(testDocs)).source(Requests.INDEX_CONTENT_TYPE, "field", "value") + new IndexRequest("test").id(Integer.toString(testDocs)).source(Requests.INDEX_CONTENT_TYPE, "field", "value") ); - multiGetRequestBuilder.add("test", "test", Integer.toString(testDocs)); + multiGetRequestBuilder.add("test", Integer.toString(testDocs)); } else { testReadOnlyDocs++; processor.add( - new IndexRequest("test-ro", "test", Integer.toString(testReadOnlyDocs)).source( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ) + new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs)) + 
.source(Requests.INDEX_CONTENT_TYPE, "field", "value") ); } } @@ -280,7 +272,6 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception Set readOnlyIds = new HashSet<>(); for (BulkItemResponse bulkItemResponse : listener.bulkItems) { assertThat(bulkItemResponse.getIndex(), either(equalTo("test")).or(equalTo("test-ro"))); - assertThat(bulkItemResponse.getType(), equalTo("test")); if (bulkItemResponse.getIndex().equals("test")) { assertThat(bulkItemResponse.isFailed(), equalTo(false)); // with concurrent requests > 1 we can't rely on the order of the bulk requests @@ -302,25 +293,11 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception private static MultiGetRequestBuilder indexDocs(Client client, BulkProcessor processor, int numDocs) throws Exception { MultiGetRequestBuilder multiGetRequestBuilder = client.prepareMultiGet(); for (int i = 1; i <= numDocs; i++) { - if (randomBoolean()) { - processor.add( - new IndexRequest("test", "test", Integer.toString(i)).source( - Requests.INDEX_CONTENT_TYPE, - "field", - randomRealisticUnicodeOfLengthBetween(1, 30) - ) - ); - } else { - final String source = "{ \"index\":{\"_index\":\"test\",\"_type\":\"test\",\"_id\":\"" - + Integer.toString(i) - + "\"} }\n" - + Strings.toString( - JsonXContent.contentBuilder().startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject() - ) - + "\n"; - processor.add(new BytesArray(source), null, null, XContentType.JSON); - } - multiGetRequestBuilder.add("test", "test", Integer.toString(i)); + processor.add( + new IndexRequest("test").id(Integer.toString(i)) + .source(Requests.INDEX_CONTENT_TYPE, "field", randomRealisticUnicodeOfLengthBetween(1, 30)) + ); + multiGetRequestBuilder.add("test", Integer.toString(i)); } return multiGetRequestBuilder; } @@ -330,7 +307,6 @@ private static void assertResponseItems(List bulkItemResponses int i = 1; for (BulkItemResponse bulkItemResponse : bulkItemResponses) { 
assertThat(bulkItemResponse.getIndex(), equalTo("test")); - assertThat(bulkItemResponse.getType(), equalTo("test")); assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++))); assertThat( "item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(), @@ -345,7 +321,6 @@ private static void assertMultiGetResponse(MultiGetResponse multiGetResponse, in int i = 1; for (MultiGetItemResponse multiGetItemResponse : multiGetResponse) { assertThat(multiGetItemResponse.getIndex(), equalTo("test")); - assertThat(multiGetItemResponse.getType(), equalTo("test")); assertThat(multiGetItemResponse.getId(), equalTo(Integer.toString(i++))); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java index bcda78ed6f788..687a4e9b733fd 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorRetryIT.java @@ -57,7 +57,6 @@ @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) public class BulkProcessorRetryIT extends OpenSearchIntegTestCase { private static final String INDEX_NAME = "test"; - private static final String TYPE_NAME = "type"; @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -159,11 +158,7 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) client().admin().indices().refresh(new RefreshRequest()).get(); - SearchResponse results = client().prepareSearch(INDEX_NAME) - .setTypes(TYPE_NAME) - .setQuery(QueryBuilders.matchAllQuery()) - .setSize(0) - .get(); + SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get(); if (rejectedExecutionExpected) { assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); @@ 
-190,7 +185,6 @@ private static void indexDocs(BulkProcessor processor, int numDocs) { processor.add( client().prepareIndex() .setIndex(INDEX_NAME) - .setType(TYPE_NAME) .setId(Integer.toString(i)) .setSource("field", randomRealisticUnicodeOfLengthBetween(1, 30)) .request() diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java index 570d1055a7a6c..6311ac6876192 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java @@ -137,11 +137,11 @@ public void testBulkUpdateSimple() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("1").setSource("field", 1)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("2").setSource("field", 2).setCreate(true)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("3").setSource("field", 3)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("4").setSource("field", 4)) - .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("5").setSource("field", 5)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("1").setSource("field", 1)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("2").setSource("field", 2).setCreate(true)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("3").setSource("field", 3)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("4").setSource("field", 4)) + .add(client().prepareIndex().setIndex(indexOrAlias()).setId("5").setSource("field", 5)) .execute() .actionGet(); @@ -154,12 +154,11 @@ public void testBulkUpdateSimple() throws Exception { final Script script = new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap()); bulkResponse = client().prepareBulk() - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1").setScript(script)) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script).setRetryOnConflict(3)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("1").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("2").setScript(script).setRetryOnConflict(3)) .add( client().prepareUpdate() .setIndex(indexOrAlias()) - .setType("type1") .setId("3") .setDoc(jsonBuilder().startObject().field("field1", "test").endObject()) ) @@ -177,17 +176,17 @@ public void testBulkUpdateSimple() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getId(), equalTo("3")); assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L)); - GetResponse getResponse = client().prepareGet().setIndex("test").setType("type1").setId("1").execute().actionGet(); + GetResponse getResponse = client().prepareGet().setIndex("test").setId("1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getVersion(), equalTo(2L)); assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(2L)); - getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").execute().actionGet(); + getResponse = client().prepareGet().setIndex("test").setId("2").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getVersion(), equalTo(2L)); assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(3L)); - getResponse = client().prepareGet().setIndex("test").setType("type1").setId("3").execute().actionGet(); + getResponse = client().prepareGet().setIndex("test").setId("3").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); 
assertThat(getResponse.getVersion(), equalTo(2L)); assertThat(getResponse.getSource().get("field1").toString(), equalTo("test")); @@ -196,13 +195,12 @@ public void testBulkUpdateSimple() throws Exception { .add( client().prepareUpdate() .setIndex(indexOrAlias()) - .setType("type1") .setId("6") .setScript(script) .setUpsert(jsonBuilder().startObject().field("field", 0).endObject()) ) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("7").setScript(script)) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("7").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setId("2").setScript(script)) .get(); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -217,15 +215,15 @@ public void testBulkUpdateSimple() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getIndex(), equalTo("test")); assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(3L)); - getResponse = client().prepareGet().setIndex("test").setType("type1").setId("6").execute().actionGet(); + getResponse = client().prepareGet().setIndex("test").setId("6").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getVersion(), equalTo(1L)); assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(0L)); - getResponse = client().prepareGet().setIndex("test").setType("type1").setId("7").execute().actionGet(); + getResponse = client().prepareGet().setIndex("test").setId("7").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").execute().actionGet(); + getResponse = client().prepareGet().setIndex("test").setId("2").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getVersion(), equalTo(3L)); 
assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(4L)); @@ -273,9 +271,9 @@ public void testBulkWithCAS() throws Exception { createIndex("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field", "1")) - .add(client().prepareIndex("test", "type", "2").setCreate(true).setSource("field", "1")) - .add(client().prepareIndex("test", "type", "1").setSource("field", "2")) + .add(client().prepareIndex("test").setId("1").setCreate(true).setSource("field", "1")) + .add(client().prepareIndex("test").setId("2").setCreate(true).setSource("field", "1")) + .add(client().prepareIndex("test").setId("1").setSource("field", "2")) .get(); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); @@ -286,19 +284,9 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(2L)); bulkResponse = client().prepareBulk() - .add( - client().prepareUpdate("test", "type", "1") - .setIfSeqNo(40L) - .setIfPrimaryTerm(20) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2") - ) - .add(client().prepareUpdate("test", "type", "2").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) - .add( - client().prepareUpdate("test", "type", "1") - .setIfSeqNo(2L) - .setIfPrimaryTerm(1) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3") - ) + .add(client().prepareUpdate("test", "1").setIfSeqNo(40L).setIfPrimaryTerm(20).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) + .add(client().prepareUpdate("test", "2").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) + .add(client().prepareUpdate("test", "1").setIfSeqNo(2L).setIfPrimaryTerm(1).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3")) .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); @@ -306,9 
+294,9 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(4L)); bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "e1").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e2").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e1").setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e1").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e2").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test").setId("e1").setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) .get(); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); @@ -319,18 +307,8 @@ public void testBulkWithCAS() throws Exception { assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(12L)); bulkResponse = client().prepareBulk() - .add( - client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2") - .setIfSeqNo(10L) - .setIfPrimaryTerm(1) - ) - .add( - client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3") - .setIfSeqNo(20L) - .setIfPrimaryTerm(1) - ) + .add(client().prepareUpdate("test", "e1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2").setIfSeqNo(10L).setIfPrimaryTerm(1)) + .add(client().prepareUpdate("test", "e1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3").setIfSeqNo(20L).setIfPrimaryTerm(1)) .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); @@ -342,9 +320,9 @@ public void testBulkUpdateMalformedScripts() throws Exception { 
ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex().setIndex("test").setType("type1").setId("1").setSource("field", 1)) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("2").setSource("field", 1)) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("3").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("1").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("2").setSource("field", 1)) + .add(client().prepareIndex().setIndex("test").setId("3").setSource("field", 1)) .execute() .actionGet(); @@ -355,7 +333,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("1") .setFetchSource("field", null) .setScript( @@ -370,7 +347,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("2") .setFetchSource("field", null) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap())) @@ -378,7 +354,6 @@ public void testBulkUpdateMalformedScripts() throws Exception { .add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId("3") .setFetchSource("field", null) .setScript( @@ -425,7 +400,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setFetchSource("counter", null) .setScript(script) @@ -440,14 +414,13 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getVersion(), equalTo(1L)); assertThat(response.getItems()[i].getIndex(), equalTo("test")); - assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), 
equalTo(OpType.UPDATE)); assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L)); assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1)); for (int j = 0; j < 5; j++) { - GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", Integer.toString(i)).execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getVersion(), equalTo(1L)); assertThat(((Number) getResponse.getSource().get("counter")).longValue(), equalTo(1L)); @@ -458,7 +431,6 @@ public void testBulkUpdateLargerVolume() throws Exception { for (int i = 0; i < numDocs; i++) { UpdateRequestBuilder updateBuilder = client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setFetchSource("counter", null); if (i % 2 == 0) { @@ -480,7 +452,6 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getVersion(), equalTo(2L)); assertThat(response.getItems()[i].getIndex(), equalTo("test")); - assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L)); @@ -490,7 +461,7 @@ public void testBulkUpdateLargerVolume() throws Exception { builder = client().prepareBulk(); int maxDocs = numDocs / 2 + numDocs; for (int i = (numDocs / 2); i < maxDocs; i++) { - builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)).setScript(script)); + 
builder.add(client().prepareUpdate().setIndex("test").setId(Integer.toString(i)).setScript(script)); } response = builder.execute().actionGet(); assertThat(response.hasFailures(), equalTo(true)); @@ -504,7 +475,6 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(id))); assertThat(response.getItems()[i].getVersion(), equalTo(3L)); assertThat(response.getItems()[i].getIndex(), equalTo("test")); - assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); } } @@ -514,7 +484,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op = \"none\"", Collections.emptyMap())) ); @@ -526,7 +495,6 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat(response.getItems()[i].getItemId(), equalTo(i)); assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i))); assertThat(response.getItems()[i].getIndex(), equalTo("test")); - assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); } @@ -535,7 +503,6 @@ public void testBulkUpdateLargerVolume() throws Exception { builder.add( client().prepareUpdate() .setIndex("test") - .setType("type1") .setId(Integer.toString(i)) .setScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx.op = \"delete\"", Collections.emptyMap())) ); @@ -550,10 +517,9 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat(itemResponse.getItemId(), equalTo(i)); assertThat(itemResponse.getId(), equalTo(Integer.toString(i))); assertThat(itemResponse.getIndex(), equalTo("test")); - assertThat(itemResponse.getType(), equalTo("type1")); assertThat(itemResponse.getOpType(), 
equalTo(OpType.UPDATE)); for (int j = 0; j < 5; j++) { - GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).get(); + GetResponse getResponse = client().prepareGet("test", Integer.toString(i)).get(); assertThat(getResponse.isExists(), equalTo(false)); } } @@ -572,7 +538,7 @@ public void testBulkIndexingWhileInitializing() throws Exception { for (int i = 0; i < numDocs;) { final BulkRequestBuilder builder = client().prepareBulk(); for (int j = 0; j < bulk && i < numDocs; j++, i++) { - builder.add(client().prepareIndex("test", "type1", Integer.toString(i)).setSource("val", i)); + builder.add(client().prepareIndex("test").setId(Integer.toString(i)).setSource("val", i)); } logger.info("bulk indexing {}-{}", i - bulk, i - 1); BulkResponse response = builder.get(); @@ -604,7 +570,7 @@ public void testFailingVersionedUpdatedOnBulk() throws Exception { } BulkRequestBuilder requestBuilder = client().prepareBulk(); requestBuilder.add( - client().prepareUpdate("test", "type", "1") + client().prepareUpdate("test", "1") .setIfSeqNo(0L) .setIfPrimaryTerm(1) .setDoc(Requests.INDEX_CONTENT_TYPE, "field", threadID) @@ -648,7 +614,7 @@ public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() { } else { name = "test"; } - builder.add(client().prepareIndex().setIndex(name).setType("type1").setId("1").setSource("field", 1)); + builder.add(client().prepareIndex().setIndex(name).setId("1").setSource("field", 1)); } BulkResponse bulkResponse = builder.get(); assertThat(bulkResponse.hasFailures(), is(expectFailure)); @@ -661,21 +627,21 @@ public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() { // issue 6630 public void testThatFailedUpdateRequestReturnsCorrectType() throws Exception { BulkResponse indexBulkItemResponse = client().prepareBulk() - .add(new IndexRequest("test", "type", "3").source("{ \"title\" : \"Great Title of doc 3\" }", XContentType.JSON)) - .add(new IndexRequest("test", "type", "4").source("{ \"title\" : 
\"Great Title of doc 4\" }", XContentType.JSON)) - .add(new IndexRequest("test", "type", "5").source("{ \"title\" : \"Great Title of doc 5\" }", XContentType.JSON)) - .add(new IndexRequest("test", "type", "6").source("{ \"title\" : \"Great Title of doc 6\" }", XContentType.JSON)) + .add(new IndexRequest("test").id("3").source("{ \"title\" : \"Great Title of doc 3\" }", XContentType.JSON)) + .add(new IndexRequest("test").id("4").source("{ \"title\" : \"Great Title of doc 4\" }", XContentType.JSON)) + .add(new IndexRequest("test").id("5").source("{ \"title\" : \"Great Title of doc 5\" }", XContentType.JSON)) + .add(new IndexRequest("test").id("6").source("{ \"title\" : \"Great Title of doc 6\" }", XContentType.JSON)) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); assertNoFailures(indexBulkItemResponse); BulkResponse bulkItemResponse = client().prepareBulk() - .add(new IndexRequest("test", "type", "1").source("{ \"title\" : \"Great Title of doc 1\" }", XContentType.JSON)) - .add(new IndexRequest("test", "type", "2").source("{ \"title\" : \"Great Title of doc 2\" }", XContentType.JSON)) - .add(new UpdateRequest("test", "type", "3").doc("{ \"date\" : \"2014-01-30T23:59:57\"}", XContentType.JSON)) - .add(new UpdateRequest("test", "type", "4").doc("{ \"date\" : \"2014-13-30T23:59:57\"}", XContentType.JSON)) - .add(new DeleteRequest("test", "type", "5")) - .add(new DeleteRequest("test", "type", "6")) + .add(new IndexRequest("test").id("1").source("{ \"title\" : \"Great Title of doc 1\" }", XContentType.JSON)) + .add(new IndexRequest("test").id("2").source("{ \"title\" : \"Great Title of doc 2\" }", XContentType.JSON)) + .add(new UpdateRequest("test", "3").doc("{ \"date\" : \"2014-01-30T23:59:57\"}", XContentType.JSON)) + .add(new UpdateRequest("test", "4").doc("{ \"date\" : \"2014-13-30T23:59:57\"}", XContentType.JSON)) + .add(new DeleteRequest("test", "5")) + .add(new DeleteRequest("test", "6")) .get(); assertNoFailures(indexBulkItemResponse); @@ -696,11 +662,11 @@ 
private static String indexOrAlias() { public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception { createIndex("bulkindex1", "bulkindex2"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) - .add(new IndexRequest("bulkindex2", "index2_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) - .add(new IndexRequest("bulkindex2", "index2_type").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) - .add(new UpdateRequest("bulkindex2", "index2_type", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) - .add(new DeleteRequest("bulkindex2", "index2_type", "3")) + bulkRequest.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) + .add(new IndexRequest("bulkindex2").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) + .add(new IndexRequest("bulkindex2").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) + .add(new UpdateRequest("bulkindex2", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) + .add(new DeleteRequest("bulkindex2", "3")) .setRefreshPolicy(RefreshPolicy.IMMEDIATE); client().bulk(bulkRequest).get(); @@ -710,11 +676,11 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception { assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex2"))); BulkRequest bulkRequest2 = new BulkRequest(); - bulkRequest2.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) - .add(new IndexRequest("bulkindex2", "index2_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) - .add(new IndexRequest("bulkindex2", "index2_type").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) - .add(new UpdateRequest("bulkindex2", "index2_type", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) - .add(new DeleteRequest("bulkindex2", "index2_type", "3")) + 
bulkRequest2.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) + .add(new IndexRequest("bulkindex2").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) + .add(new IndexRequest("bulkindex2").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo2")) + .add(new UpdateRequest("bulkindex2", "2").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) + .add(new DeleteRequest("bulkindex2", "3")) .setRefreshPolicy(RefreshPolicy.IMMEDIATE); BulkResponse bulkResponse = client().bulk(bulkRequest2).get(); @@ -726,13 +692,13 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception { public void testFailedRequestsOnClosedIndex() throws Exception { createIndex("bulkindex1"); - client().prepareIndex("bulkindex1", "index1_type", "1").setSource("text", "test").get(); + client().prepareIndex("bulkindex1").setId("1").setSource("text", "test").get(); assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex1"))); BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(RefreshPolicy.IMMEDIATE); - bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) - .add(new UpdateRequest("bulkindex1", "index1_type", "1").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) - .add(new DeleteRequest("bulkindex1", "index1_type", "1")); + bulkRequest.add(new IndexRequest("bulkindex1").id("1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) + .add(new UpdateRequest("bulkindex1", "1").doc(Requests.INDEX_CONTENT_TYPE, "foo", "bar")) + .add(new DeleteRequest("bulkindex1", "1")); BulkResponse bulkResponse = client().bulk(bulkRequest).get(); assertThat(bulkResponse.hasFailures(), is(true)); @@ -749,21 +715,9 @@ public void testFailedRequestsOnClosedIndex() throws Exception { // issue 9821 public void testInvalidIndexNamesCorrectOpType() { BulkResponse bulkResponse = client().prepareBulk() - .add( - client().prepareIndex() - 
.setIndex("INVALID.NAME") - .setType("type1") - .setId("1") - .setSource(Requests.INDEX_CONTENT_TYPE, "field", 1) - ) - .add( - client().prepareUpdate() - .setIndex("INVALID.NAME") - .setType("type1") - .setId("1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", randomInt()) - ) - .add(client().prepareDelete().setIndex("INVALID.NAME").setType("type1").setId("1")) + .add(client().prepareIndex().setIndex("INVALID.NAME").setId("1").setSource(Requests.INDEX_CONTENT_TYPE, "field", 1)) + .add(client().prepareUpdate().setIndex("INVALID.NAME").setId("1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", randomInt())) + .add(client().prepareDelete().setIndex("INVALID.NAME").setId("1")) .get(); assertThat(bulkResponse.getItems().length, is(3)); assertThat(bulkResponse.getItems()[0].getOpType(), is(OpType.INDEX)); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java index 1309bf74bf809..eb69eaaa9c2e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/search/SearchProgressActionListenerIT.java @@ -213,7 +213,7 @@ private static List createRandomIndices(Client client) { for (int i = 0; i < numIndices; i++) { String indexName = String.format(Locale.ROOT, "index-%03d", i); assertAcked(client.admin().indices().prepareCreate(indexName).get()); - client.prepareIndex(indexName, "doc", Integer.toString(i)).setSource("number", i, "foo", "bar").get(); + client.prepareIndex(indexName).setId(Integer.toString(i)).setSource("number", i, "foo", "bar").get(); } client.admin().indices().prepareRefresh("index-*").get(); ClusterSearchShardsResponse resp = client.admin().cluster().prepareSearchShards("index-*").get(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java index 9ed76d6fe8f99..c7985d972de5e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java @@ -358,9 +358,9 @@ public void testSearchIdle() throws Exception { } } }); - client().prepareIndex("test", "_doc").setId("1").setSource("created_date", "2020-01-01").get(); - client().prepareIndex("test", "_doc").setId("2").setSource("created_date", "2020-01-02").get(); - client().prepareIndex("test", "_doc").setId("3").setSource("created_date", "2020-01-03").get(); + client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); + client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); + client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); assertBusy(() -> { SearchResponse resp = client().prepareSearch("test") .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) @@ -491,7 +491,7 @@ private void indexSomeDocs(String indexName, int numberOfShards, int numberOfDoc createIndex(indexName, Settings.builder().put("index.number_of_shards", numberOfShards).build()); for (int i = 0; i < numberOfDocs; i++) { - IndexResponse indexResponse = client().prepareIndex(indexName, "_doc").setSource("number", randomInt()).get(); + IndexResponse indexResponse = client().prepareIndex(indexName).setSource("number", randomInt()).get(); assertEquals(RestStatus.CREATED, indexResponse.status()); } client().admin().indices().prepareRefresh(indexName).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java index 
9e93235c29729..e919b2b85e079 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/support/WaitActiveShardCountIT.java @@ -63,9 +63,10 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertAcked(createIndexResponse); // indexing, by default, will work (waiting for one shard copy only) - client().prepareIndex("test", "type1", "1").setSource(source("1", "test"), XContentType.JSON).execute().actionGet(); + client().prepareIndex("test").setId("1").setSource(source("1", "test"), XContentType.JSON).execute().actionGet(); try { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(2) // wait for 2 active shard copies .setTimeout(timeValueMillis(100)) @@ -96,7 +97,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); // this should work, since we now have two - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(2) .setTimeout(timeValueSeconds(1)) @@ -104,7 +106,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { .actionGet(); try { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", "test"), XContentType.JSON) .setWaitForActiveShards(ActiveShardCount.ALL) .setTimeout(timeValueMillis(100)) @@ -138,7 +141,8 @@ public void testReplicationWaitsForActiveShardCount() throws Exception { assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); // this should work, since we now have all shards started - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(source("1", 
"test"), XContentType.JSON) .setWaitForActiveShards(ActiveShardCount.ALL) .setTimeout(timeValueSeconds(1)) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java index 2aa29304577b4..f8db63bc8b61d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/support/master/IndexingMasterFailoverIT.java @@ -97,8 +97,8 @@ public void run() { return; } for (int i = 0; i < 10; i++) { - // index data with mapping changes - IndexResponse response = client(dataNode).prepareIndex("myindex", "mytype").setSource("field_" + i, "val").get(); + // index data + IndexResponse response = client(dataNode).prepareIndex("myindex").setSource("field_" + i, "val").get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index ac2be1a15c43e..d28dcbb924f95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -81,21 +81,19 @@ protected Collection> nodePlugins() { public void testNoSuchDoc() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); - client().prepareIndex("test", "type1", "666").setSource("field", "foo 
bar").execute().actionGet(); + client().prepareIndex("test").setId("667").setSource("field", "foo bar").execute().actionGet(); refresh(); for (int i = 0; i < 20; i++) { - ActionFuture termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "" + i)); + ActionFuture termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "" + i)); TermVectorsResponse actionGet = termVector.actionGet(); assertThat(actionGet, notNullValue()); assertThat(actionGet.getIndex(), equalTo("test")); @@ -119,10 +117,10 @@ public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); // when indexing a field that simply has a question mark, the term vectors will be null - client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet(); + client().prepareIndex("test").setId("0").setSource("existingfield", "?").execute().actionGet(); refresh(); ActionFuture termVector = client().termVectors( - new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(new String[] { "existingfield" }) + new TermVectorsRequest(indexOrAlias(), "0").selectedFields(new String[] { "existingfield" }) ); // lets see if the null term vectors are caught... @@ -147,10 +145,10 @@ public void testExistingFieldButNotInDocNPE() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); // when indexing a field that simply has a question mark, the term vectors will be null - client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet(); + client().prepareIndex("test").setId("0").setSource("anotherexistingfield", 1).execute().actionGet(); refresh(); ActionFuture termVectors = client().termVectors( - new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(randomBoolean() ? 
new String[] { "existingfield" } : null) + new TermVectorsRequest(indexOrAlias(), "0").selectedFields(randomBoolean() ? new String[] { "existingfield" } : null) .termStatistics(true) .fieldStatistics(true) ); @@ -186,14 +184,12 @@ public void testNotIndexedField() throws Exception { List indexBuilders = new ArrayList<>(); for (int i = 0; i < 6; i++) { - indexBuilders.add(client().prepareIndex().setIndex("test").setType("type1").setId(String.valueOf(i)).setSource("field" + i, i)); + indexBuilders.add(client().prepareIndex().setIndex("test").setId(String.valueOf(i)).setSource("field" + i, i)); } indexRandom(true, indexBuilders); for (int i = 0; i < 4; i++) { - TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i)) - .setSelectedFields("field" + i) - .get(); + TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get(); assertThat(resp, notNullValue()); assertThat(resp.isExists(), equalTo(true)); assertThat(resp.getIndex(), equalTo("test")); @@ -201,9 +197,7 @@ public void testNotIndexedField() throws Exception { } for (int i = 4; i < 6; i++) { - TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i)) - .setSelectedFields("field" + i) - .get(); + TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get(); assertThat(resp.getIndex(), equalTo("test")); assertThat("field" + i + " :", resp.getFields().terms("field" + i), notNullValue()); } @@ -232,7 +226,8 @@ public void testSimpleTermVectors() throws IOException { ) ); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("field", "the quick brown fox jumps over the lazy dog") @@ -245,7 +240,7 @@ public void testSimpleTermVectors() throws IOException { 
refresh(); } for (int i = 0; i < 10; i++) { - TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), "type1", Integer.toString(i)) + TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), Integer.toString(i)) .setPayloads(true) .setOffsets(true) .setPositions(true) @@ -340,7 +335,8 @@ public void testRandomSingleTermVectors() throws IOException { ) ); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "_doc", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("field", "the quick brown fox jumps over the lazy dog") @@ -362,7 +358,7 @@ public void testRandomSingleTermVectors() throws IOException { boolean isPositionsRequested = randomBoolean(); String infoString = createInfoString(isPositionsRequested, isOffsetRequested, optionString); for (int i = 0; i < 10; i++) { - TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "_doc", Integer.toString(i)) + TermVectorsRequestBuilder resp = client().prepareTermVectors("test", Integer.toString(i)) .setOffsets(isOffsetRequested) .setPositions(isPositionsRequested) .setSelectedFields(); @@ -496,12 +492,12 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource(source).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource(source).execute().actionGet(); refresh(); } for (int i = 0; i < 10; i++) { - TermVectorsResponse response = client().prepareTermVectors("test", "type1", Integer.toString(i)) + TermVectorsResponse response = client().prepareTermVectors("test", Integer.toString(i)) .setPayloads(true) .setOffsets(true) .setPositions(true) @@ -579,9 +575,7 @@ public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionExc List indexBuilders = new ArrayList<>(); for (String indexName : indexNames) { 
for (int id = 0; id < content.length; id++) { - indexBuilders.add( - client().prepareIndex().setIndex(indexName).setType("type1").setId(String.valueOf(id)).setSource("field1", content[id]) - ); + indexBuilders.add(client().prepareIndex().setIndex(indexName).setId(String.valueOf(id)).setSource("field1", content[id])); } } indexRandom(true, indexBuilders); @@ -590,7 +584,7 @@ public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionExc for (int id = 0; id < content.length; id++) { Fields[] fields = new Fields[2]; for (int j = 0; j < indexNames.length; j++) { - TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id)) + TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id)) .setOffsets(true) .setPositions(true) .setSelectedFields("field1") @@ -658,10 +652,10 @@ public void testSimpleWildCards() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); ensureGreen(); - client().prepareIndex("test", "type1", "0").setSource(source).get(); + client().prepareIndex("test").setId("0").setSource(source).get(); refresh(); - TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields("field*").get(); + TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setSelectedFields("field*").get(); assertThat("Doc doesn't exists but should", response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat("All term vectors should have been generated", response.getFields().size(), equalTo(numFields)); @@ -684,15 +678,13 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc List indexBuilders = new ArrayList<>(); for (int i = 0; i < content.length; i++) { - indexBuilders.add( - client().prepareIndex().setIndex("test").setType("type1").setId(String.valueOf(i)).setSource("field1", content[i]) - ); + 
indexBuilders.add(client().prepareIndex().setIndex("test").setId(String.valueOf(i)).setSource("field1", content[i])); } indexRandom(true, indexBuilders); for (int i = 0; i < content.length; i++) { // request tvs from existing document - TermVectorsResponse respExisting = client().prepareTermVectors("test", "type1", String.valueOf(i)) + TermVectorsResponse respExisting = client().prepareTermVectors("test", String.valueOf(i)) .setOffsets(true) .setPositions(true) .setFieldStatistics(true) @@ -703,7 +695,6 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc // request tvs from artificial document TermVectorsResponse respArtificial = client().prepareTermVectors() .setIndex("test") - .setType("type1") .setRouting(String.valueOf(i)) // ensure we get the stats from the same shard as existing doc .setDoc(jsonBuilder().startObject().field("field1", content[i]).endObject()) .setOffsets(true) @@ -728,7 +719,6 @@ public void testArtificialNoDoc() throws IOException { String text = "the quick brown fox jumps over the lazy dog"; TermVectorsResponse resp = client().prepareTermVectors() .setIndex("test") - .setType("type1") .setDoc(jsonBuilder().startObject().field("field1", text).endObject()) .setOffsets(true) .setPositions(true) @@ -776,7 +766,7 @@ public void testPerFieldAnalyzer() throws IOException { ensureGreen(); // index a single document with prepared source - client().prepareIndex("test", "type1", "0").setSource(source).get(); + client().prepareIndex("test").setId("0").setSource(source).get(); refresh(); // create random per_field_analyzer and selected fields @@ -798,15 +788,13 @@ public void testPerFieldAnalyzer() throws IOException { } // selected fields not specified - TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0") - .setPerFieldAnalyzer(perFieldAnalyzer) - .get(); + TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setPerFieldAnalyzer(perFieldAnalyzer).get(); // 
should return all fields that have terms vectors, some with overridden analyzer checkAnalyzedFields(response.getFields(), withTermVectors, perFieldAnalyzer); // selected fields specified including some not in the mapping - response = client().prepareTermVectors(indexOrAlias(), "type1", "0") + response = client().prepareTermVectors(indexOrAlias(), "0") .setSelectedFields(selectedFields.toArray(Strings.EMPTY_ARRAY)) .setPerFieldAnalyzer(perFieldAnalyzer) .get(); @@ -848,27 +836,27 @@ public void testTermVectorsWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); - TermVectorsResponse response = client().prepareTermVectors("test", "type1", "1").get(); + TermVectorsResponse response = client().prepareTermVectors("test", "1").get(); assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: // version 0 means ignore version, which is the default - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), equalTo(1L)); - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), equalTo(1L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get(); + client().prepareGet(indexOrAlias(), 
"1").setVersion(2).get(); fail(); } catch (VersionConflictEngineException e) { // all good @@ -878,45 +866,45 @@ public void testTermVectorsWithVersion() { refresh(); // version 0 means ignore version, which is the default - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(1L)); - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(1L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get(); fail(); } catch (VersionConflictEngineException e) { // all good } logger.info("--> index doc 1 again, so increasing the version"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: // version 0 means ignore version, which is the default - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), 
equalTo("test")); assertThat(response.getVersion(), equalTo(2L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(1).get(); fail(); } catch (VersionConflictEngineException e) { // all good } - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); @@ -926,20 +914,20 @@ public void testTermVectorsWithVersion() { refresh(); // version 0 means ignore version, which is the default - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(2L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get(); fail(); } catch (VersionConflictEngineException e) { // all good } - response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get(); + response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); @@ -961,7 +949,7 @@ public void testFilterLength() throws ExecutionException, InterruptedException, } tags.add(tag); } - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); + indexRandom(true, 
client().prepareIndex("test").setId("1").setSource("tags", tags)); logger.info("Checking best tags by longest to shortest size ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); @@ -969,7 +957,7 @@ public void testFilterLength() throws ExecutionException, InterruptedException, TermVectorsResponse response; for (int i = 0; i < numTerms; i++) { filterSettings.minWordLength = numTerms - i; - response = client().prepareTermVectors("test", "type1", "1") + response = client().prepareTermVectors("test", "1") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) @@ -997,14 +985,14 @@ public void testFilterTermFreq() throws ExecutionException, InterruptedException } uniqueTags.add(tag); } - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("tags", tags)); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("tags", tags)); logger.info("Checking best tags by highest to lowest term freq ..."); TermVectorsRequest.FilterSettings filterSettings = new TermVectorsRequest.FilterSettings(); TermVectorsResponse response; for (int i = 0; i < numTerms; i++) { filterSettings.maxNumTerms = i + 1; - response = client().prepareTermVectors("test", "type1", "1") + response = client().prepareTermVectors("test", "1") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) @@ -1028,7 +1016,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, List tags = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { tags.add("tag_" + i); - builders.add(client().prepareIndex("test", "type1", i + "").setSource("tags", tags)); + builders.add(client().prepareIndex("test").setId(i + "").setSource("tags", tags)); } indexRandom(true, builders); @@ -1037,7 +1025,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, TermVectorsResponse response; for (int i = 0; i < numDocs; i++) { filterSettings.maxNumTerms = i + 1; 
- response = client().prepareTermVectors("test", "type1", (numDocs - 1) + "") + response = client().prepareTermVectors("test", (numDocs - 1) + "") .setSelectedFields("tags") .setFieldStatistics(true) .setTermStatistics(true) @@ -1056,7 +1044,7 @@ public void testArtificialDocWithPreference() throws InterruptedException, IOExc ensureGreen(); // index document - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "random permutation")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("field1", "random permutation")); // Get search shards ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards("test").get(); @@ -1068,7 +1056,6 @@ public void testArtificialDocWithPreference() throws InterruptedException, IOExc for (Integer shardId : shardIds) { TermVectorsResponse tvResponse = client().prepareTermVectors() .setIndex("test") - .setType("type1") .setPreference("_shards:" + shardId) .setDoc(jsonBuilder().startObject().field("field1", "random permutation").endObject()) .setFieldStatistics(true) @@ -1120,7 +1107,6 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio indexBuilders.add( client().prepareIndex() .setIndex(indexName) - .setType("type1") .setId(String.valueOf(id)) .setSource("field1", content[id], "field2", content[id]) ); @@ -1132,7 +1118,7 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio for (int id = 0; id < content.length; id++) { Fields[] fields = new Fields[2]; for (int j = 0; j < indexNames.length; j++) { - TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id)) + TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id)) .setOffsets(true) .setPositions(true) .setSelectedFields("field1", "field2") diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java index da9d7876223a9..91d280a9c4771 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java @@ -83,7 +83,7 @@ public void testDuelESLucene() throws Exception { } public void testMissingIndexThrowsMissingIndex() throws Exception { - TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", "typeX", Integer.toString(1)); + TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", Integer.toString(1)); MultiTermVectorsRequestBuilder mtvBuilder = client().prepareMultiTermVectors(); mtvBuilder.add(requestBuilder.request()); MultiTermVectorsResponse response = mtvBuilder.execute().actionGet(); @@ -96,19 +96,19 @@ public void testMultiTermVectorsWithVersion() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); - MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "type1", "1").get(); + MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "1").get(); assertThat(response.getResponses().length, equalTo(1)); assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog response = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1)) - .add(new 
TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default @@ -130,9 +130,9 @@ public void testMultiTermVectorsWithVersion() throws Exception { // Version from Lucene index refresh(); response = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1).realtime(false)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2).realtime(false)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1).realtime(false)) + .add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2).realtime(false)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default @@ -150,14 +150,14 @@ public void testMultiTermVectorsWithVersion() throws Exception { assertThat(response.getResponses()[2].getFailure().getCause().getCause(), instanceOf(VersionConflictEngineException.class)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog response = client().prepareMultiTermVectors() - .add(new 
TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default @@ -180,9 +180,9 @@ public void testMultiTermVectorsWithVersion() throws Exception { // Version from Lucene index refresh(); response = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1)) + .add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default diff --git a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java index fa2ebe3fa2108..2d01e4c031538 100644 --- a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java @@ -117,7 +117,7 @@ public void testAliases() throws 
Exception { logger.info("--> indexing against [alias1], should fail now"); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet() + () -> client().index(indexRequest("alias1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet() ); assertThat( exception.getMessage(), @@ -134,9 +134,8 @@ public void testAliases() throws Exception { }); logger.info("--> indexing against [alias1], should work now"); - IndexResponse indexResponse = client().index( - indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON) - ).actionGet(); + IndexResponse indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)) + .actionGet(); assertThat(indexResponse.getIndex(), equalTo("test")); logger.info("--> creating index [test_x]"); @@ -152,7 +151,7 @@ public void testAliases() throws Exception { logger.info("--> indexing against [alias1], should fail now"); exception = expectThrows( IllegalArgumentException.class, - () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet() + () -> client().index(indexRequest("alias1").id("1").source(source("2", "test"), XContentType.JSON)).actionGet() ); assertThat( exception.getMessage(), @@ -164,10 +163,7 @@ public void testAliases() throws Exception { ); logger.info("--> deleting against [alias1], should fail now"); - exception = expectThrows( - IllegalArgumentException.class, - () -> client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet() - ); + exception = expectThrows(IllegalArgumentException.class, () -> client().delete(deleteRequest("alias1").id("1")).actionGet()); assertThat( exception.getMessage(), equalTo( @@ -183,8 +179,7 @@ public void testAliases() throws Exception { }); logger.info("--> indexing against 
[alias1], should work now"); - indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON)) - .actionGet(); + indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); assertThat(indexResponse.getIndex(), equalTo("test")); assertAliasesVersionIncreases("test_x", () -> { @@ -193,12 +188,11 @@ public void testAliases() throws Exception { }); logger.info("--> indexing against [alias1], should work now"); - indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON)) - .actionGet(); + indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); assertThat(indexResponse.getIndex(), equalTo("test_x")); logger.info("--> deleting against [alias1], should fail now"); - DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet(); + DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").id("1")).actionGet(); assertThat(deleteResponse.getIndex(), equalTo("test_x")); assertAliasesVersionIncreases("test_x", () -> { @@ -207,8 +201,7 @@ public void testAliases() throws Exception { }); logger.info("--> indexing against [alias1], should work against [test_x]"); - indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"), XContentType.JSON)) - .actionGet(); + indexResponse = client().index(indexRequest("alias1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); assertThat(indexResponse.getIndex(), equalTo("test_x")); } @@ -290,28 +283,16 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception { logger.info("--> indexing against [test]"); client().index( - indexRequest("test").type("type1") - .id("1") - .source(source("1", "foo test"), XContentType.JSON) - 
.setRefreshPolicy(RefreshPolicy.IMMEDIATE) + indexRequest("test").id("1").source(source("1", "foo test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") - .source(source("2", "bar test"), XContentType.JSON) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + indexRequest("test").id("2").source(source("2", "bar test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("3") - .source(source("3", "baz test"), XContentType.JSON) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + indexRequest("test").id("3").source(source("3", "baz test"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("4") - .source(source("4", "something else"), XContentType.JSON) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + indexRequest("test").id("4").source(source("4", "something else"), XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE) ).actionGet(); logger.info("--> checking single filtering alias search"); @@ -408,16 +389,16 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); logger.info("--> indexing against [test1]"); - client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("1").source(source("1", "foo test"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("2").source(source("2", "bar test"), XContentType.JSON)).get(); 
+ client().index(indexRequest("test1").id("3").source(source("3", "baz test"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("4").source(source("4", "something else"), XContentType.JSON)).get(); logger.info("--> indexing against [test2]"); - client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("5").source(source("5", "foo test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("6").source(source("6", "bar test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("7").source(source("7", "baz test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("8").source(source("8", "something else"), XContentType.JSON)).get(); refresh(); @@ -489,9 +470,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { logger.info("--> creating indices"); createIndex("test1", "test2", "test3"); - assertAcked( - client().admin().indices().preparePutMapping("test1", "test2", "test3").setType("type1").setSource("name", "type=text") - ); + assertAcked(client().admin().indices().preparePutMapping("test1", "test2", "test3").setSource("name", "type=text")); ensureGreen(); @@ -524,17 +503,17 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); logger.info("--> indexing against [test1]"); - client().index(indexRequest("test1").type("type1").id("11").source(source("11", "foo test1"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("12").source(source("12", "bar 
test1"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("13").source(source("13", "baz test1"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("11").source(source("11", "foo test1"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("12").source(source("12", "bar test1"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("13").source(source("13", "baz test1"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("21").source(source("21", "foo test2"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("22").source(source("22", "bar test2"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("23").source(source("23", "baz test2"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("21").source(source("21", "foo test2"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("22").source(source("22", "bar test2"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("23").source(source("23", "baz test2"), XContentType.JSON)).get(); - client().index(indexRequest("test3").type("type1").id("31").source(source("31", "foo test3"), XContentType.JSON)).get(); - client().index(indexRequest("test3").type("type1").id("32").source(source("32", "bar test3"), XContentType.JSON)).get(); - client().index(indexRequest("test3").type("type1").id("33").source(source("33", "baz test3"), XContentType.JSON)).get(); + client().index(indexRequest("test3").id("31").source(source("31", "foo test3"), XContentType.JSON)).get(); + client().index(indexRequest("test3").id("32").source(source("32", "bar test3"), XContentType.JSON)).get(); + client().index(indexRequest("test3").id("33").source(source("33", "baz test3"), XContentType.JSON)).get(); refresh(); @@ -647,16 +626,16 @@ public void testDeletingByQueryFilteringAliases() throws Exception { ); logger.info("--> 
indexing against [test1]"); - client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"), XContentType.JSON)).get(); - client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("1").source(source("1", "foo test"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("2").source(source("2", "bar test"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("3").source(source("3", "baz test"), XContentType.JSON)).get(); + client().index(indexRequest("test1").id("4").source(source("4", "something else"), XContentType.JSON)).get(); logger.info("--> indexing against [test2]"); - client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"), XContentType.JSON)).get(); - client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("5").source(source("5", "foo test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("6").source(source("6", "bar test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("7").source(source("7", "baz test"), XContentType.JSON)).get(); + client().index(indexRequest("test2").id("8").source(source("8", "something else"), XContentType.JSON)).get(); refresh(); @@ -744,7 +723,7 @@ public void testWaitForAliasCreationMultipleShards() throws 
Exception { for (int i = 0; i < 10; i++) { final String aliasName = "alias" + i; assertAliasesVersionIncreases("test", () -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName))); - client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON)).get(); + client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).get(); } } @@ -765,7 +744,7 @@ public void testWaitForAliasCreationSingleShard() throws Exception { for (int i = 0; i < 10; i++) { final String aliasName = "alias" + i; assertAliasesVersionIncreases("test", () -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName))); - client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON)).get(); + client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).get(); } } @@ -787,8 +766,7 @@ public void run() { "test", () -> assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName)) ); - client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"), XContentType.JSON)) - .actionGet(); + client().index(indexRequest(aliasName).id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); } }); } @@ -882,11 +860,7 @@ public void testIndicesGetAliases() throws Exception { createIndex("bazbar"); assertAcked( - client().admin() - .indices() - .preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar") - .setType("type") - .setSource("field", "type=text") + client().admin().indices().preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar").setSource("field", "type=text") ); ensureGreen(); @@ -1226,7 +1200,7 @@ public void testAliasFilterWithNowInRangeFilterAndQuery() throws Exception { final int numDocs = scaledRandomIntBetween(5, 52); for (int i = 1; i <= numDocs; i++) { - client().prepareIndex("my-index", "my-type").setSource("timestamp", 
"2016-12-12").get(); + client().prepareIndex("my-index").setSource("timestamp", "2016-12-12").get(); if (i % 2 == 0) { refresh(); SearchResponse response = client().prepareSearch("filter1").get(); @@ -1325,7 +1299,7 @@ public void testAliasActionRemoveIndex() throws InterruptedException, ExecutionE public void testRemoveIndexAndReplaceWithAlias() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test")); - indexRandom(true, client().prepareIndex("test_2", "test", "test").setSource("test", "test")); + indexRandom(true, client().prepareIndex("test_2").setId("test").setSource("test", "test")); assertAliasesVersionIncreases( "test_2", () -> assertAcked(client().admin().indices().prepareAliases().addAlias("test_2", "test").removeIndex("test")) diff --git a/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java index 7bb8f0cc318af..8ede3e25b2e1a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/blocks/SimpleBlocksIT.java @@ -162,7 +162,7 @@ private void canNotCreateIndex(String index) { private void canIndexDocument(String index) { try { - IndexRequestBuilder builder = client().prepareIndex(index, "zzz"); + IndexRequestBuilder builder = client().prepareIndex(index); builder.setSource("foo", "bar"); IndexResponse r = builder.execute().actionGet(); assertThat(r, notNullValue()); @@ -173,7 +173,7 @@ private void canIndexDocument(String index) { private void canNotIndexDocument(String index) { try { - IndexRequestBuilder builder = client().prepareIndex(index, "zzz"); + IndexRequestBuilder builder = client().prepareIndex(index); builder.setSource("foo", "bar"); builder.execute().actionGet(); fail(); @@ -306,7 +306,7 @@ public void testAddIndexBlock() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - 
.mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); @@ -332,7 +332,7 @@ public void testSameBlockTwice() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(1, 10)) - .mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); } @@ -378,7 +378,7 @@ public void testConcurrentAddBlock() throws InterruptedException { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); ensureYellowAndNoInitializingShards(indexName); @@ -460,7 +460,7 @@ public void testAddBlockWhileDeletingIndices() throws Exception { false, randomBoolean(), IntStream.range(0, 10) - .mapToObj(n -> client().prepareIndex(indexName, "zzz").setId(String.valueOf(n)).setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setId(String.valueOf(n)).setSource("num", n)) .collect(toList()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java index c45155809a5ea..f9f99eb2662b0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/broadcast/BroadcastActionsIT.java @@ -40,7 +40,7 @@ import java.io.IOException; import static org.opensearch.client.Requests.indexRequest; -import static org.opensearch.index.query.QueryBuilders.termQuery; +import static org.opensearch.index.query.QueryBuilders.matchAllQuery; 
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @@ -57,16 +57,16 @@ public void testBroadcastOperations() throws IOException { NumShards numShards = getNumShards("test"); logger.info("Running Cluster Health"); - client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); + client().index(indexRequest("test").id("1").source(source("1", "test"))).actionGet(); flush(); - client().index(indexRequest("test").type("type1").id("2").source(source("2", "test"))).actionGet(); + client().index(indexRequest("test").id("2").source(source("2", "test"))).actionGet(); refresh(); logger.info("Count"); // check count for (int i = 0; i < 5; i++) { // test successful - SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(termQuery("_type", "type1")).get(); + SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); diff --git a/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java b/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java deleted file mode 100644 index 6108e8ee8efe3..0000000000000 --- a/server/src/internalClusterTest/java/org/opensearch/client/documentation/IndicesDocumentationIT.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.client.documentation; - -import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.test.OpenSearchIntegTestCase; - -import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.instanceOf; - -/** - * This class is used to generate the Java indices administration documentation. - * You need to wrap your code between two tags like: - * // tag::example[] - * // end::example[] - * - * Where example is your tag name. 
- * - * Then in the documentation, you can extract what is between tag and end tags - * with ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{client-tests}/IndicesDocumentationIT.java[your-example-tag-here] - * -------------------------------------------------- - */ -public class IndicesDocumentationIT extends OpenSearchIntegTestCase { - - /** - * This test method is used to generate the Put Mapping Java Indices API documentation - * at "docs/java-api/admin/indices/put-mapping.asciidoc" so the documentation gets tested - * so that it compiles and runs without throwing errors at runtime. - */ - public void testPutMappingDocumentation() throws Exception { - Client client = client(); - - // tag::index-with-mapping - client.admin().indices().prepareCreate("twitter") // <1> - .addMapping("_doc", "message", "type=text") // <2> - .get(); - // end::index-with-mapping - GetMappingsResponse getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); - assertEquals(1, getMappingsResponse.getMappings().size()); - ImmutableOpenMap indexMapping = getMappingsResponse.getMappings().get("twitter"); - assertThat(indexMapping.get("_doc"), instanceOf(MappingMetadata.class)); - - // we need to delete in order to create a fresh new index with another type - client.admin().indices().prepareDelete("twitter").get(); - client.admin().indices().prepareCreate("twitter").get(); - - // tag::putMapping-request-source - client.admin().indices().preparePutMapping("twitter") // <1> - .setType("_doc") - .setSource("{\n" + - " \"properties\": {\n" + - " \"name\": {\n" + // <2> - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}", XContentType.JSON) - .get(); - - // You can also provide the type in the source document - client.admin().indices().preparePutMapping("twitter") - .setType("_doc") - .setSource("{\n" + - " \"_doc\":{\n" + // <3> - " \"properties\": {\n" + - " \"name\": {\n" + - " 
\"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}", XContentType.JSON) - .get(); - // end::putMapping-request-source - getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); - assertEquals(1, getMappingsResponse.getMappings().size()); - indexMapping = getMappingsResponse.getMappings().get("twitter"); - assertEquals( - singletonMap("properties", singletonMap("name", singletonMap("type", "text"))), - indexMapping.get("_doc").getSourceAsMap() - ); - } - -} diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java index 0374ef7d1b59b..c3dc686921eb6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/MinimumMasterNodesIT.java @@ -121,7 +121,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { NumShards numShards = getNumShards("test"); logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); } // make sure that all shards recovered before trying to flush assertThat( @@ -286,7 +286,7 @@ public void testThreeNodesNoMasterBlock() throws Exception { NumShards numShards = getNumShards("test"); logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); } ensureGreen(); // make sure that all shards recovered before trying to flush diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java 
b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java index 1fd61c9e063d0..cef22343a1fea 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/NoMasterNodeIT.java @@ -115,25 +115,25 @@ public void testNoMasterActions() throws Exception { }); assertRequestBuilderThrows( - clientToMasterlessNode.prepareGet("test", "type1", "1"), + clientToMasterlessNode.prepareGet("test", "1"), ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE ); assertRequestBuilderThrows( - clientToMasterlessNode.prepareGet("no_index", "type1", "1"), + clientToMasterlessNode.prepareGet("no_index", "1"), ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE ); assertRequestBuilderThrows( - clientToMasterlessNode.prepareMultiGet().add("test", "type1", "1"), + clientToMasterlessNode.prepareMultiGet().add("test", "1"), ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE ); assertRequestBuilderThrows( - clientToMasterlessNode.prepareMultiGet().add("no_index", "type1", "1"), + clientToMasterlessNode.prepareMultiGet().add("no_index", "1"), ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE ); @@ -165,7 +165,7 @@ public void testNoMasterActions() throws Exception { checkUpdateAction( false, timeout, - clientToMasterlessNode.prepareUpdate("test", "type1", "1") + clientToMasterlessNode.prepareUpdate("test", "1") .setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())) .setTimeout(timeout) ); @@ -173,39 +173,41 @@ public void testNoMasterActions() throws Exception { checkUpdateAction( true, timeout, - clientToMasterlessNode.prepareUpdate("no_index", "type1", "1") + clientToMasterlessNode.prepareUpdate("no_index", "1") .setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap())) .setTimeout(timeout) ); checkWriteAction( - 
clientToMasterlessNode.prepareIndex("test", "type1", "1") + clientToMasterlessNode.prepareIndex("test") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) ); checkWriteAction( - clientToMasterlessNode.prepareIndex("no_index", "type1", "1") + clientToMasterlessNode.prepareIndex("no_index") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) ); BulkRequestBuilder bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("test").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("test", "type1", "2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("test").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.setTimeout(timeout); checkWriteAction(bulkRequestBuilder); bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("no_index", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("no_index").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.add( - clientToMasterlessNode.prepareIndex("no_index", "type1", "2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) + clientToMasterlessNode.prepareIndex("no_index").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject()) ); bulkRequestBuilder.setTimeout(timeout); checkWriteAction(bulkRequestBuilder); @@ -252,8 +254,8 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { 
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ).get(); client().admin().cluster().prepareHealth("_all").setWaitForGreenStatus().get(); - client().prepareIndex("test1", "type1", "1").setSource("field", "value1").get(); - client().prepareIndex("test2", "type1", "1").setSource("field", "value1").get(); + client().prepareIndex("test1").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test2").setId("1").setSource("field", "value1").get(); refresh(); ensureSearchable("test1", "test2"); @@ -275,7 +277,7 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)); }); - GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "type1", "1").get(); + GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "1").get(); assertExists(getResponse); SearchResponse countResponse = clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0).get(); @@ -292,7 +294,7 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { TimeValue timeout = TimeValue.timeValueMillis(200); long now = System.currentTimeMillis(); try { - clientToMasterlessNode.prepareUpdate("test1", "type1", "1") + clientToMasterlessNode.prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get(); @@ -306,7 +308,8 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { } try { - clientToMasterlessNode.prepareIndex("test1", "type1", "1") + clientToMasterlessNode.prepareIndex("test1") + .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) .get(); @@ -330,7 +333,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 
1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) ).get(); client().admin().cluster().prepareHealth("_all").setWaitForGreenStatus().get(); - client().prepareIndex("test1", "type1").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test1").setId("1").setSource("field", "value1").get(); refresh(); ensureGreen("test1"); @@ -371,10 +374,10 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { } }); - GetResponse getResponse = client(randomFrom(nodesWithShards)).prepareGet("test1", "type1", "1").get(); + GetResponse getResponse = client(randomFrom(nodesWithShards)).prepareGet("test1", "1").get(); assertExists(getResponse); - expectThrows(Exception.class, () -> client(partitionedNode).prepareGet("test1", "type1", "1").get()); + expectThrows(Exception.class, () -> client(partitionedNode).prepareGet("test1", "1").get()); SearchResponse countResponse = client(randomFrom(nodesWithShards)).prepareSearch("test1") .setAllowPartialSearchResults(true) @@ -388,20 +391,20 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { ); TimeValue timeout = TimeValue.timeValueMillis(200); - client(randomFrom(nodesWithShards)).prepareUpdate("test1", "type1", "1") + client(randomFrom(nodesWithShards)).prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get(); expectThrows( Exception.class, - () -> client(partitionedNode).prepareUpdate("test1", "type1", "1") + () -> client(partitionedNode).prepareUpdate("test1", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .setTimeout(timeout) .get() ); - client(randomFrom(nodesWithShards)).prepareIndex("test1", "type1") + client(randomFrom(nodesWithShards)).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) @@ -410,7 +413,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { // dynamic mapping updates fail 
expectThrows( MasterNotDiscoveredException.class, - () -> client(randomFrom(nodesWithShards)).prepareIndex("test1", "type1") + () -> client(randomFrom(nodesWithShards)).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().field("new_field", "value").endObject()) .setTimeout(timeout) @@ -420,7 +423,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { // dynamic index creation fails expectThrows( MasterNotDiscoveredException.class, - () -> client(randomFrom(nodesWithShards)).prepareIndex("test2", "type1") + () -> client(randomFrom(nodesWithShards)).prepareIndex("test2") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) @@ -429,7 +432,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { expectThrows( Exception.class, - () -> client(partitionedNode).prepareIndex("test1", "type1") + () -> client(partitionedNode).prepareIndex("test1") .setId("1") .setSource(XContentFactory.jsonBuilder().startObject().endObject()) .setTimeout(timeout) diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java index 931a67655a92f..93a903e0b5e0c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java @@ -292,24 +292,15 @@ public void testLargeClusterStatePublishing() throws Exception { .get() ); ensureGreen(); // wait for green state, so its both green, and there are no more pending events - MappingMetadata masterMappingMetadata = client().admin() - .indices() - .prepareGetMappings("test") - .setTypes("type") - .get() - .getMappings() - .get("test") - .get("type"); + MappingMetadata masterMappingMetadata = client().admin().indices().prepareGetMappings("test").get().getMappings().get("test"); 
for (Client client : clients()) { MappingMetadata mappingMetadata = client.admin() .indices() .prepareGetMappings("test") - .setTypes("type") .setLocal(true) .get() .getMappings() - .get("test") - .get("type"); + .get("test"); assertThat(mappingMetadata.source().string(), equalTo(masterMappingMetadata.source().string())); assertThat(mappingMetadata, equalTo(masterMappingMetadata)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java index 9377fe284fce7..fc193163f75cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.discovery.MasterNotDiscoveredException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -321,11 +320,9 @@ public void testAliasFilterValidation() { internalCluster().startDataOnlyNode(); assertAcked( - prepareCreate("test").addMapping( - "type1", - "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " - + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}}", - XContentType.JSON + prepareCreate("test").setMapping( + "{\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " + + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}" ) ); client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java 
b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java index 1c5ff5deada1d..cdf853c2ad9ae 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java @@ -335,7 +335,7 @@ private void rerouteWithAllocateLocalGateway(Settings commonSettings) throws Exc ); if (closed == false) { - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } final Index index = resolveIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java index 6e3ffc79da580..398adbd0d1ca5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/FilteringAllocationIT.java @@ -72,7 +72,7 @@ public void testDecommissionNodeNoReplicas() { ensureGreen("test"); logger.info("--> index some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } client().admin().indices().prepareRefresh().execute().actionGet(); assertThat( @@ -187,7 +187,7 @@ public void testDisablingAllocationFiltering() { logger.info("--> index some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", 
"value" + i).execute().actionGet(); } client().admin().indices().prepareRefresh().execute().actionGet(); assertThat( diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java index 6317d633f25ea..3060b5c23fe75 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java @@ -53,7 +53,6 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.discovery.Discovery; @@ -177,14 +176,16 @@ public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startMasterOnlyNode(); String dataNode = internalCluster().startDataOnlyNode(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); - prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)).addMapping("type").get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + .addMapping(MapperService.SINGLE_MAPPING_NAME) + .get(); ensureGreen("test"); // block none master node. 
BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(dataNode, random()); internalCluster().setDisruptionScheme(disruption); logger.info("--> indexing a doc"); - index("test", "type", "1"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1"); refresh(); disruption.startDisrupting(); logger.info("--> delete index and recreate it"); @@ -264,19 +265,12 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // Add a new mapping... ActionFuture putMappingResponse = executeAndCancelCommittedPublication( - client().admin().indices().preparePutMapping("index").setType("type").setSource("field", "type=long") + client().admin().indices().preparePutMapping("index").setSource("field", "type=long") ); // ...and wait for mappings to be available on master assertBusy(() -> { - ImmutableOpenMap indexMappings = client().admin() - .indices() - .prepareGetMappings("index") - .get() - .getMappings() - .get("index"); - assertNotNull(indexMappings); - MappingMetadata typeMappings = indexMappings.get("type"); + MappingMetadata typeMappings = client().admin().indices().prepareGetMappings("index").get().getMappings().get("index"); assertNotNull(typeMappings); Object properties; try { @@ -291,7 +285,7 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // this request does not change the cluster state, because mapping is already created, // we don't await and cancel committed publication - ActionFuture docIndexResponse = client().prepareIndex("index", "type", "1").setSource("field", 42).execute(); + ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); // Wait a bit to make sure that the reason why we did not get a response // is that cluster state processing is blocked and not just that it takes @@ -361,7 +355,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { internalCluster().setDisruptionScheme(disruption); disruption.startDisrupting(); final 
ActionFuture putMappingResponse = executeAndCancelCommittedPublication( - client().admin().indices().preparePutMapping("index").setType("type").setSource("field", "type=long") + client().admin().indices().preparePutMapping("index").setSource("field", "type=long") ); final Index index = resolveIndex("index"); @@ -371,14 +365,14 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper mapper = mapperService.documentMapper("type"); + DocumentMapper mapper = mapperService.documentMapper(); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field")); }); - final ActionFuture docIndexResponse = client().prepareIndex("index", "type", "1").setSource("field", 42).execute(); + final ActionFuture docIndexResponse = client().prepareIndex("index").setId("1").setSource("field", 42).execute(); - assertBusy(() -> assertTrue(client().prepareGet("index", "type", "1").get().isExists())); + assertBusy(() -> assertTrue(client().prepareGet("index", "1").get().isExists())); // index another document, this time using dynamic mappings. 
// The ack timeout of 0 on dynamic mapping updates makes it possible for the document to be indexed on the primary, even @@ -386,7 +380,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { // this request does not change the cluster state, because the mapping is dynamic, // we need to await and cancel committed publication ActionFuture dynamicMappingsFut = executeAndCancelCommittedPublication( - client().prepareIndex("index", "type", "2").setSource("field2", 42) + client().prepareIndex("index").setId("2").setSource("field2", 42) ); // ...and wait for second mapping to be available on master @@ -395,12 +389,12 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper mapper = mapperService.documentMapper("type"); + DocumentMapper mapper = mapperService.documentMapper(); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field2")); }); - assertBusy(() -> assertTrue(client().prepareGet("index", "type", "2").get().isExists())); + assertBusy(() -> assertTrue(client().prepareGet("index", "2").get().isExists())); // The mappings have not been propagated to the replica yet as a consequence the document count not be indexed // We wait on purpose to make sure that the document is not indexed because the shard operation is stalled diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java index 5c07ef8e7baea..1447379b93ec8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/UnsafeBootstrapAndDetachCommandIT.java @@ 
-430,7 +430,7 @@ public void testAllMasterEligibleNodesFailedDanglingIndexImport() throws Excepti ensureStableCluster(2); logger.info("--> index 1 doc and ensure index is green"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); ensureGreen("test"); assertBusy( () -> internalCluster().getInstances(IndicesService.class) @@ -439,7 +439,7 @@ public void testAllMasterEligibleNodesFailedDanglingIndexImport() throws Excepti logger.info("--> verify 1 doc in the index"); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(true)); logger.info("--> stop data-only node and detach it from the old cluster"); Settings dataNodeDataPathSettings = Settings.builder() @@ -474,7 +474,7 @@ public boolean clearData(String nodeName) { ensureGreen("test"); logger.info("--> verify the doc is there"); - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(true)); } public void testNoInitialBootstrapAfterDetach() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java index 2dad58550228e..a20e944caebb2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/AllocationIdIT.java @@ -193,7 +193,7 @@ private int indexDocs(String indexName, Object... 
source) throws InterruptedExce final int numExtraDocs = between(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numExtraDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource(source); + builders[i] = client().prepareIndex(indexName).setSource(source); } indexRandom(true, false, true, Arrays.asList(builders)); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java index bf19444db9159..b4b08a4c9ad73 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/DelayedAllocationIT.java @@ -205,7 +205,7 @@ private void indexRandomData() throws Exception { int numDocs = scaledRandomIntBetween(100, 1000); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } // we want to test both full divergent copies of the shard in terms of segments, and // a case where they are the same (using sync flush), index Random does all this goodness diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java index 8418101bc7a09..55bdc2a4ac3c4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java @@ -128,8 +128,8 @@ public void testBulkWeirdScenario() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - 
.add(client().prepareIndex().setIndex("test").setType("_doc").setId("1").setSource("field1", "value1")) - .add(client().prepareUpdate().setIndex("test").setType("_doc").setId("1").setDoc("field2", "value2")) + .add(client().prepareIndex().setIndex("test").setId("1").setSource("field1", "value1")) + .add(client().prepareUpdate().setIndex("test").setId("1").setDoc("field2", "value2")) .execute() .actionGet(); @@ -150,7 +150,7 @@ public void testBulkWeirdScenario() throws Exception { // returns data paths settings of in-sync shard copy private Settings createStaleReplicaScenario(String master) throws Exception { - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); refresh(); ClusterState state = client().admin().cluster().prepareState().all().get().getState(); List shards = state.routingTable().allShards("test"); @@ -177,7 +177,7 @@ private Settings createStaleReplicaScenario(String master) throws Exception { ensureStableCluster(2, master); logger.info("--> index a document into previous replica shard (that is now primary)"); - client(replicaNode).prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client(replicaNode).prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); logger.info("--> shut down node that has new acknowledged document"); final Settings inSyncDataPathSettings = internalCluster().dataPathSettings(replicaNode); @@ -558,7 +558,7 @@ public void testRemoveAllocationIdOnWriteAfterNodeLeave() throws Exception { ensureYellow("test"); assertEquals(2, client().admin().cluster().prepareState().get().getState().metadata().index("test").inSyncAllocationIds(0).size()); logger.info("--> indexing..."); - client().prepareIndex("test", 
"type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); assertEquals(1, client().admin().cluster().prepareState().get().getState().metadata().index("test").inSyncAllocationIds(0).size()); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @Override @@ -595,7 +595,7 @@ public void testNotWaitForQuorumCopies() throws Exception { .get() ); ensureGreen("test"); - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); logger.info("--> removing 2 nodes from cluster"); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1), nodes.get(2))); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1), nodes.get(2))); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index 35a27f8b6b176..96f059695e719 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -302,7 +302,7 @@ private long createReasonableSizedShards(final String indexName) throws Interrup while (true) { final IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[scaledRandomIntBetween(100, 10000)]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", randomAlphaOfLength(10)); + indexRequestBuilders[i] = 
client().prepareIndex(indexName).setSource("field", randomAlphaOfLength(10)); } indexRandom(true, indexRequestBuilders); forceMerge(); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index fea19fe450c7c..05b0f10be02f3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -212,7 +212,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { assertThat("node2 has 2 shards", shardCountByNodeId.get(nodeIds.get(2)), equalTo(2)); } - client().prepareIndex("test", "doc", "1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); assertSearchHits(client().prepareSearch("test").get(), "1"); // Move all nodes above the low watermark so no shard movement can occur, and at least one node above the flood stage watermark so @@ -227,7 +227,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { assertBusy( () -> assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("1").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ) ); @@ -236,7 +236,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { // Cannot add further documents assertBlocked( - client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), + client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); 
assertSearchHits(client().prepareSearch("test").get(), "1"); @@ -249,7 +249,8 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { // Attempt to create a new document until DiskUsageMonitor unblocks the index assertBusy(() -> { try { - client().prepareIndex("test", "doc", "3") + client().prepareIndex("test") + .setId("3") .setSource("foo", "bar") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java index f804648e82e91..86e83e01b008c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterSearchShardsIT.java @@ -173,7 +173,7 @@ public void testClusterSearchShardsWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test-blocks", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test-blocks").setId("" + i).setSource("test", "init").execute().actionGet(); } ensureGreen("test-blocks"); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java index 1754c61a1ab61..61a47d2bb0237 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -80,9 +80,7 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep final String node_2 = internalCluster().startDataOnlyNode(); List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < 100; i++) { - indexRequestBuilderList.add( - 
client().prepareIndex().setIndex("test").setType("_doc").setSource("{\"int_field\":1}", XContentType.JSON) - ); + indexRequestBuilderList.add(client().prepareIndex().setIndex("test").setSource("{\"int_field\":1}", XContentType.JSON)); } indexRandom(true, indexRequestBuilderList); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java index 0bfd3e22a3bc9..53002a38c3a9d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/ClusterDisruptionIT.java @@ -171,7 +171,8 @@ public void testAckedIndexing() throws Exception { id = Integer.toString(idGenerator.incrementAndGet()); int shard = Math.floorMod(Murmur3HashFunction.hash(id), numPrimaries); logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard); - IndexRequestBuilder indexRequestBuilder = client.prepareIndex("test", "type", id) + IndexRequestBuilder indexRequestBuilder = client.prepareIndex("test") + .setId(id) .setSource(Collections.singletonMap(randomFrom(fieldNames), randomNonNegativeLong()), XContentType.JSON) .setTimeout(timeout); @@ -255,7 +256,7 @@ public void testAckedIndexing() throws Exception { for (String id : ackedDocs.keySet()) { assertTrue( "doc [" + id + "] indexed via node [" + ackedDocs.get(id) + "] not found", - client(node).prepareGet("test", "type", id).setPreference("_local").get().isExists() + client(node).prepareGet("test", id).setPreference("_local").get().isExists() ); } } catch (AssertionError | NoShardAvailableActionException e) { @@ -308,15 +309,12 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception { ensureStableCluster(2, notIsolatedNode); assertFalse(client(notIsolatedNode).admin().cluster().prepareHealth("test").setWaitForYellowStatus().get().isTimedOut()); - IndexResponse 
indexResponse = internalCluster().client(notIsolatedNode) - .prepareIndex("test", "type") - .setSource("field", "value") - .get(); + IndexResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test").setSource("field", "value").get(); assertThat(indexResponse.getVersion(), equalTo(1L)); logger.info("Verifying if document exists via node[{}]", notIsolatedNode); GetResponse getResponse = internalCluster().client(notIsolatedNode) - .prepareGet("test", "type", indexResponse.getId()) + .prepareGet("test", indexResponse.getId()) .setPreference("_local") .get(); assertThat(getResponse.isExists(), is(true)); @@ -330,7 +328,7 @@ public void testRejoinDocumentExistsInAllShardCopies() throws Exception { for (String node : nodes) { logger.info("Verifying if document exists after isolating node[{}] via node[{}]", isolatedNode, node); - getResponse = internalCluster().client(node).prepareGet("test", "type", indexResponse.getId()).setPreference("_local").get(); + getResponse = internalCluster().client(node).prepareGet("test", indexResponse.getId()).setPreference("_local").get(); assertThat(getResponse.isExists(), is(true)); assertThat(getResponse.getVersion(), equalTo(1L)); assertThat(getResponse.getId(), equalTo(indexResponse.getId())); @@ -514,7 +512,8 @@ public void testRestartNodeWhileIndexing() throws Exception { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { - IndexResponse response = client().prepareIndex(index, "_doc", id) + IndexResponse response = client().prepareIndex(index) + .setId(id) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) .get(); assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java index 
06fc638b299aa..5f90e15701331 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/MasterDisruptionIT.java @@ -298,9 +298,9 @@ public void testMappingTimeout() throws Exception { disruption.startDisrupting(); BulkRequestBuilder bulk = client().prepareBulk(); - bulk.add(client().prepareIndex("test", "doc", "2").setSource("{ \"f\": 1 }", XContentType.JSON)); - bulk.add(client().prepareIndex("test", "doc", "3").setSource("{ \"g\": 1 }", XContentType.JSON)); - bulk.add(client().prepareIndex("test", "doc", "4").setSource("{ \"f\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("2").setSource("{ \"f\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("3").setSource("{ \"g\": 1 }", XContentType.JSON)); + bulk.add(client().prepareIndex("test").setId("4").setSource("{ \"f\": 1 }", XContentType.JSON)); BulkResponse bulkResponse = bulk.get(); assertTrue(bulkResponse.hasFailures()); diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java index 4fce66af21101..086aeb695c411 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/SnapshotDisruptionIT.java @@ -244,7 +244,7 @@ public void testMasterFailOverDuringShardSnapshots() throws Exception { final String indexName = "index-one"; createIndex(indexName); - client().prepareIndex(indexName, "_doc").setSource("foo", "bar").get(); + client().prepareIndex(indexName).setSource("foo", "bar").get(); blockDataNode(repoName, dataNode); @@ -294,7 +294,7 @@ private void createRandomIndex(String idxName) throws InterruptedException { final int numdocs = randomIntBetween(10, 100); IndexRequestBuilder[] builders = new 
IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(idxName, "type1", Integer.toString(i)).setSource("field1", "bar " + i); + builders[i] = client().prepareIndex(idxName).setId(Integer.toString(i)).setSource("field1", "bar " + i); } indexRandom(true, builders); } diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index d1138351bde76..f3693be3b7227 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -47,6 +47,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; @@ -83,14 +84,12 @@ public void testIndexActions() throws Exception { logger.info("Indexing [type1/1]"); IndexResponse indexResponse = client().prepareIndex() .setIndex("test") - .setType("type1") .setId("1") .setSource(source("1", "test")) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); assertThat(indexResponse.getIndex(), equalTo(getConcreteIndexName())); assertThat(indexResponse.getId(), equalTo("1")); - assertThat(indexResponse.getType(), equalTo("type1")); logger.info("Refreshing"); RefreshResponse refreshResponse = refresh(); assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); @@ -117,18 +116,18 @@ public void testIndexActions() throws Exception { logger.info("Get [type1/1]"); for (int i = 0; i < 5; i++) { - getResult = client().prepareGet("test", "type1", "1").execute().actionGet(); + getResult = client().prepareGet("test", "1").execute().actionGet(); 
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test")); - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } logger.info("Get [type1/1] with script"); for (int i = 0; i < 5; i++) { - getResult = client().prepareGet("test", "type1", "1").setStoredFields("name").execute().actionGet(); + getResult = client().prepareGet("test", "1").setStoredFields("name").execute().actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat(getResult.isExists(), equalTo(true)); assertThat(getResult.getSourceAsBytes(), nullValue()); @@ -137,28 +136,27 @@ public void testIndexActions() throws Exception { logger.info("Get [type1/2] (should be empty)"); for (int i = 0; i < 5; i++) { - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat(getResult.isExists(), equalTo(false)); } logger.info("Delete [type1/1]"); - DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").execute().actionGet(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").execute().actionGet(); assertThat(deleteResponse.getIndex(), equalTo(getConcreteIndexName())); assertThat(deleteResponse.getId(), equalTo("1")); - assertThat(deleteResponse.getType(), equalTo("type1")); logger.info("Refreshing"); client().admin().indices().refresh(refreshRequest("test")).actionGet(); logger.info("Get [type1/1] (should be empty)"); for (int i = 0; i < 5; i++) { - getResult = 
client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.isExists(), equalTo(false)); } logger.info("Index [type1/1]"); - client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); + client().index(indexRequest("test").id("1").source(source("1", "test"))).actionGet(); logger.info("Index [type1/2]"); - client().index(indexRequest("test").type("type1").id("2").source(source("2", "test2"))).actionGet(); + client().index(indexRequest("test").id("2").source(source("2", "test2"))).actionGet(); logger.info("Flushing"); FlushResponse flushResult = client().admin().indices().prepareFlush("test").execute().actionGet(); @@ -169,10 +167,10 @@ public void testIndexActions() throws Exception { logger.info("Get [type1/1] and [type1/2]"); for (int i = 0; i < 5; i++) { - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); String ste1 = getResult.getSourceAsString(); String ste2 = Strings.toString(source("2", "test2")); assertThat("cycle #" + i, ste1, equalTo(ste2)); @@ -185,7 +183,7 @@ public void testIndexActions() throws Exception { // test successful SearchResponse countResponse = client().prepareSearch("test") .setSize(0) - .setQuery(termQuery("_type", "type1")) + .setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME)) .execute() .actionGet(); assertNoFailures(countResponse); @@ -213,12 +211,12 @@ public void testBulk() throws Exception { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - 
.add(client().prepareIndex().setIndex("test").setType("type1").setId("1").setSource(source("1", "test"))) - .add(client().prepareIndex().setIndex("test").setType("type1").setId("2").setSource(source("2", "test")).setCreate(true)) - .add(client().prepareIndex().setIndex("test").setType("type1").setSource(source("3", "test"))) - .add(client().prepareIndex().setIndex("test").setType("type1").setCreate(true).setSource(source("4", "test"))) - .add(client().prepareDelete().setIndex("test").setType("type1").setId("1")) - .add(client().prepareIndex().setIndex("test").setType("type1").setSource("{ xxx }", XContentType.JSON)) // failure + .add(client().prepareIndex().setIndex("test").setId("1").setSource(source("1", "test"))) + .add(client().prepareIndex().setIndex("test").setId("2").setSource(source("2", "test")).setCreate(true)) + .add(client().prepareIndex().setIndex("test").setSource(source("3", "test"))) + .add(client().prepareIndex().setIndex("test").setCreate(true).setSource(source("4", "test"))) + .add(client().prepareDelete().setIndex("test").setId("1")) + .add(client().prepareIndex().setIndex("test").setSource("{ xxx }", XContentType.JSON)) // failure .execute() .actionGet(); @@ -228,37 +226,31 @@ public void testBulk() throws Exception { assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[0].getOpType(), equalTo(OpType.INDEX)); assertThat(bulkResponse.getItems()[0].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[0].getType(), equalTo("type1")); assertThat(bulkResponse.getItems()[0].getId(), equalTo("1")); assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[1].getOpType(), equalTo(OpType.CREATE)); assertThat(bulkResponse.getItems()[1].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[1].getType(), equalTo("type1")); assertThat(bulkResponse.getItems()[1].getId(), equalTo("2")); 
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[2].getOpType(), equalTo(OpType.INDEX)); assertThat(bulkResponse.getItems()[2].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[2].getType(), equalTo("type1")); String generatedId3 = bulkResponse.getItems()[2].getId(); assertThat(bulkResponse.getItems()[3].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[3].getOpType(), equalTo(OpType.CREATE)); assertThat(bulkResponse.getItems()[3].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[3].getType(), equalTo("type1")); String generatedId4 = bulkResponse.getItems()[3].getId(); assertThat(bulkResponse.getItems()[4].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[4].getOpType(), equalTo(OpType.DELETE)); assertThat(bulkResponse.getItems()[4].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[4].getType(), equalTo("type1")); assertThat(bulkResponse.getItems()[4].getId(), equalTo("1")); assertThat(bulkResponse.getItems()[5].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[5].getOpType(), equalTo(OpType.INDEX)); assertThat(bulkResponse.getItems()[5].getIndex(), equalTo(getConcreteIndexName())); - assertThat(bulkResponse.getItems()[5].getType(), equalTo("type1")); waitForRelocation(ClusterHealthStatus.GREEN); RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet(); @@ -266,15 +258,15 @@ public void testBulk() throws Exception { assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); for (int i = 0; i < 5; i++) { - GetResponse getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + GetResponse getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat("cycle #" + i, getResult.isExists(), 
equalTo(false)); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - getResult = client().get(getRequest("test").type("type1").id(generatedId3)).actionGet(); + getResult = client().get(getRequest("test").id(generatedId3)).actionGet(); assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); diff --git a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java index 41b34516c30bb..be1335bd56ba9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java @@ -60,18 +60,15 @@ public class ShardInfoIT extends OpenSearchIntegTestCase { public void testIndexAndDelete() throws Exception { prepareIndex(1); - IndexResponse indexResponse = client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON).get(); + IndexResponse indexResponse = client().prepareIndex("idx").setSource("{}", XContentType.JSON).get(); assertShardInfo(indexResponse); - DeleteResponse deleteResponse = client().prepareDelete("idx", "type", indexResponse.getId()).get(); + DeleteResponse deleteResponse = client().prepareDelete("idx", indexResponse.getId()).get(); assertShardInfo(deleteResponse); } public void testUpdate() throws Exception { prepareIndex(1); - UpdateResponse updateResponse = client().prepareUpdate("idx", "type", "1") - .setDoc("{}", XContentType.JSON) - .setDocAsUpsert(true) - .get(); + UpdateResponse updateResponse = client().prepareUpdate("idx", "1").setDoc("{}", 
XContentType.JSON).setDocAsUpsert(true).get(); assertShardInfo(updateResponse); } @@ -79,7 +76,7 @@ public void testBulkWithIndexAndDeleteItems() throws Exception { prepareIndex(1); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); for (int i = 0; i < 10; i++) { - bulkRequestBuilder.add(client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + bulkRequestBuilder.add(client().prepareIndex("idx").setSource("{}", XContentType.JSON)); } BulkResponse bulkResponse = bulkRequestBuilder.get(); @@ -87,7 +84,7 @@ public void testBulkWithIndexAndDeleteItems() throws Exception { for (BulkItemResponse item : bulkResponse) { assertThat(item.isFailed(), equalTo(false)); assertShardInfo(item.getResponse()); - bulkRequestBuilder.add(client().prepareDelete("idx", "type", item.getId())); + bulkRequestBuilder.add(client().prepareDelete("idx", item.getId())); } bulkResponse = bulkRequestBuilder.get(); @@ -101,9 +98,7 @@ public void testBulkWithUpdateItems() throws Exception { prepareIndex(1); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); for (int i = 0; i < 10; i++) { - bulkRequestBuilder.add( - client().prepareUpdate("idx", "type", Integer.toString(i)).setDoc("{}", XContentType.JSON).setDocAsUpsert(true) - ); + bulkRequestBuilder.add(client().prepareUpdate("idx", Integer.toString(i)).setDoc("{}", XContentType.JSON).setDocAsUpsert(true)); } BulkResponse bulkResponse = bulkRequestBuilder.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java b/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java index fc38387b5e587..0bebcce27f975 100644 --- a/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/env/NodeEnvironmentIT.java @@ -104,7 +104,7 @@ public Settings onNodeStopped(String nodeName) { internalCluster().startNode(dataPathSettings); logger.info("--> indexing a simple document"); - 
client().prepareIndex(indexName, "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex(indexName).setId("1").setSource("field1", "value1").get(); logger.info("--> restarting the node without the data role"); ex = expectThrows( diff --git a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java index ccb2920c274eb..2547333490f23 100644 --- a/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/env/NodeRepurposeCommandIT.java @@ -65,11 +65,11 @@ public void testRepurpose() throws Exception { prepareCreate(indexName, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get(); logger.info("--> indexing a simple document"); - client().prepareIndex(indexName, "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex(indexName).setId("1").setSource("field1", "value1").get(); ensureGreen(); - assertTrue(client().prepareGet(indexName, "type1", "1").get().isExists()); + assertTrue(client().prepareGet(indexName, "1").get().isExists()); final Settings masterNodeDataPathSettings = internalCluster().dataPathSettings(masterNode); final Settings dataNodeDataPathSettings = internalCluster().dataPathSettings(dataNode); @@ -112,7 +112,7 @@ public void testRepurpose() throws Exception { internalCluster().startCoordinatingOnlyNode(dataNodeDataPathSettings); assertTrue(indexExists(indexName)); - expectThrows(NoShardAvailableActionException.class, () -> client().prepareGet(indexName, "type1", "1").get()); + expectThrows(NoShardAvailableActionException.class, () -> client().prepareGet(indexName, "1").get()); logger.info("--> Restarting and repurposing other node"); diff --git a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java 
b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java index 79fe3a9119eae..78069970c1a60 100644 --- a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java @@ -63,38 +63,35 @@ public void testSimple() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field", "value1").get(); - ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get(); + ExplainResponse response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get(); assertNotNull(response); assertFalse(response.isExists()); // not a match b/c not realtime assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("1")); assertFalse(response.isMatch()); // not a match b/c not realtime refresh(); - response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get(); + response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get(); assertNotNull(response); assertTrue(response.isMatch()); assertNotNull(response.getExplanation()); assertTrue(response.getExplanation().isMatch()); assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("1")); assertThat(response.getExplanation().getValue(), equalTo(1.0f)); - response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.termQuery("field", "value2")).get(); + response = client().prepareExplain(indexOrAlias(), 
"1").setQuery(QueryBuilders.termQuery("field", "value2")).get(); assertNotNull(response); assertTrue(response.isExists()); assertFalse(response.isMatch()); assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("1")); assertNotNull(response.getExplanation()); assertFalse(response.getExplanation().isMatch()); - response = client().prepareExplain(indexOrAlias(), "test", "1") + response = client().prepareExplain(indexOrAlias(), "1") .setQuery( QueryBuilders.boolQuery().must(QueryBuilders.termQuery("field", "value1")).must(QueryBuilders.termQuery("field", "value2")) ) @@ -103,18 +100,16 @@ public void testSimple() throws Exception { assertTrue(response.isExists()); assertFalse(response.isMatch()); assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("1")); assertNotNull(response.getExplanation()); assertFalse(response.getExplanation().isMatch()); assertThat(response.getExplanation().getDetails().length, equalTo(2)); - response = client().prepareExplain(indexOrAlias(), "test", "2").setQuery(QueryBuilders.matchAllQuery()).get(); + response = client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get(); assertNotNull(response); assertFalse(response.isExists()); assertFalse(response.isMatch()); assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("2")); } @@ -125,14 +120,15 @@ public void testExplainWithFields() throws Exception { ); ensureGreen("test"); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().startObject("obj1").field("field1", "value1").field("field2", "value2").endObject().endObject() ) .get(); refresh(); - ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1") + ExplainResponse 
response = client().prepareExplain(indexOrAlias(), "1") .setQuery(QueryBuilders.matchAllQuery()) .setStoredFields("obj1.field1") .get(); @@ -149,7 +145,7 @@ public void testExplainWithFields() throws Exception { assertThat(response.getGetResult().isSourceEmpty(), equalTo(true)); refresh(); - response = client().prepareExplain(indexOrAlias(), "test", "1") + response = client().prepareExplain(indexOrAlias(), "1") .setQuery(QueryBuilders.matchAllQuery()) .setStoredFields("obj1.field1") .setFetchSource(true) @@ -166,31 +162,32 @@ public void testExplainWithFields() throws Exception { assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1")); assertThat(response.getGetResult().isSourceEmpty(), equalTo(false)); - response = client().prepareExplain(indexOrAlias(), "test", "1") + response = client().prepareExplain(indexOrAlias(), "1") .setQuery(QueryBuilders.matchAllQuery()) .setStoredFields("obj1.field1", "obj1.field2") .get(); assertNotNull(response); assertTrue(response.isMatch()); - String v1 = (String) response.getGetResult().field("obj1.field1").getValue(); - String v2 = (String) response.getGetResult().field("obj1.field2").getValue(); + String v1 = response.getGetResult().field("obj1.field1").getValue(); + String v2 = response.getGetResult().field("obj1.field2").getValue(); assertThat(v1, equalTo("value1")); assertThat(v2, equalTo("value2")); } @SuppressWarnings("unchecked") - public void testExplainWitSource() throws Exception { + public void testExplainWithSource() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen("test"); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().startObject("obj1").field("field1", "value1").field("field2", "value2").endObject().endObject() ) .get(); refresh(); - ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1") + ExplainResponse 
response = client().prepareExplain(indexOrAlias(), "1") .setQuery(QueryBuilders.matchAllQuery()) .setFetchSource("obj1.field1", null) .get(); @@ -204,7 +201,7 @@ public void testExplainWitSource() throws Exception { assertThat(response.getGetResult().getSource().size(), equalTo(1)); assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1")); - response = client().prepareExplain(indexOrAlias(), "test", "1") + response = client().prepareExplain(indexOrAlias(), "1") .setQuery(QueryBuilders.matchAllQuery()) .setFetchSource(null, "obj1.field2") .get(); @@ -213,17 +210,17 @@ public void testExplainWitSource() throws Exception { assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1")); } - public void testExplainWithFilteredAlias() throws Exception { + public void testExplainWithFilteredAlias() { assertAcked( prepareCreate("test").addMapping("test", "field2", "type=text") .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))) ); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value1").get(); refresh(); - ExplainResponse response = client().prepareExplain("alias1", "test", "1").setQuery(QueryBuilders.matchAllQuery()).get(); + ExplainResponse response = client().prepareExplain("alias1", "1").setQuery(QueryBuilders.matchAllQuery()).get(); assertNotNull(response); assertTrue(response.isExists()); assertFalse(response.isMatch()); @@ -239,10 +236,10 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception { ); ensureGreen("test"); - client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value1").get(); refresh(); - ExplainResponse response = 
client().prepareExplain("alias1", "test", "1") + ExplainResponse response = client().prepareExplain("alias1", "1") .setQuery(QueryBuilders.matchAllQuery()) .setFetchSource(true) .get(); @@ -251,11 +248,9 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception { assertTrue(response.isExists()); assertFalse(response.isMatch()); assertThat(response.getIndex(), equalTo("test")); - assertThat(response.getType(), equalTo("test")); assertThat(response.getId(), equalTo("1")); assertThat(response.getGetResult(), notNullValue()); assertThat(response.getGetResult().getIndex(), equalTo("test")); - assertThat(response.getGetResult().getType(), equalTo("test")); assertThat(response.getGetResult().getId(), equalTo("1")); assertThat(response.getGetResult().getSource(), notNullValue()); assertThat((String) response.getGetResult().getSource().get("field1"), equalTo("value1")); @@ -268,13 +263,11 @@ public void testExplainDateRangeInQueryString() { String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); - ExplainResponse explainResponse = client().prepareExplain("test", "type", "1") - .setQuery(queryStringQuery("past:[now-2M/d TO now/d]")) - .get(); + ExplainResponse explainResponse = client().prepareExplain("test", "1").setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); assertThat(explainResponse.isExists(), equalTo(true)); assertThat(explainResponse.isMatch(), equalTo(true)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index f306425fc9458..24aff104ce837 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -57,7 +57,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.indices.IndexClosedException; @@ -108,14 +107,7 @@ public void testMappingMetadataParsed() throws Exception { .prepareCreate("test") .addMapping( "type1", - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_routing") - .field("required", true) - .endObject() - .endObject() - .endObject() + XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject() ) .execute() .actionGet(); @@ -129,9 +121,8 @@ public void testMappingMetadataParsed() throws Exception { .getState() .metadata() .index("test") - .getMappings() - .get("type1"); - assertThat(mappingMd.routing().required(), equalTo(true)); + .mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); @@ -140,17 +131,8 @@ public void testMappingMetadataParsed() throws Exception { ensureYellow(); logger.info("--> verify meta _routing required exists"); - mappingMd = client().admin() - .cluster() - .prepareState() - .execute() - .actionGet() - .getState() - .metadata() - .index("test") - .getMappings() - .get("type1"); - assertThat(mappingMd.routing().required(), equalTo(true)); + mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); } public void testSimpleOpenClose() throws Exception { @@ -174,7 +156,7 @@ public void 
testSimpleOpenClose() throws Exception { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); @@ -188,14 +170,14 @@ public void testSimpleOpenClose() throws Exception { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> creating another index (test2) by indexing into it"); - client().prepareIndex("test2", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test2").setId("1").setSource("field1", "value1").execute().actionGet(); logger.info("--> verifying that the state is green"); ensureGreen(); @@ -214,7 +196,7 @@ public void testSimpleOpenClose() throws Exception { ); logger.info("--> trying to get the indexed document on the first index"); - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> closing test index..."); @@ -234,7 +216,7 @@ public void testSimpleOpenClose() throws Exception { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well @@ -255,11 +237,11 @@ public void testSimpleOpenClose() throws Exception { ); logger.info("--> 
trying to get the indexed document on the first round (before close and shutdown)"); - getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "2").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field1", "value1").execute().actionGet(); } public void testJustMasterNode() throws Exception { @@ -304,7 +286,7 @@ public void testJustMasterNodeAndJustDataNode() { logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); - client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex("test").setSource("field1", "value1").execute().actionGet(); } public void testTwoNodesSingleDoc() throws Exception { @@ -314,7 +296,7 @@ public void testTwoNodesSingleDoc() throws Exception { internalCluster().startNodes(2); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); ClusterHealthResponse health = client().admin() @@ -429,7 +411,7 @@ public void testRecoverBrokenIndexMetadata() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); @@ -500,23 +482,19 @@ public void 
testRecoverMissingAnalyzer() throws Exception { prepareCreate("test").setSettings( Settings.builder().put("index.analysis.analyzer.test.tokenizer", "standard").put("index.number_of_shards", "1") ) - .addMapping( - "type1", + .setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"field1\": {\n" - + " \"type\": \"text\",\n" - + " \"analyzer\": \"test\"\n" - + " }\n" + + " \"properties\": {\n" + + " \"field1\": {\n" + + " \"type\": \"text\",\n" + + " \"analyzer\": \"test\"\n" + " }\n" + " }\n" - + " }}", - XContentType.JSON + + " }" ) .get(); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); @@ -567,7 +545,7 @@ public void testRecoverMissingAnalyzer() throws Exception { public void testArchiveBrokenClusterSettings() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java index c6f1996bd7eff..c9807aa24e259 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java @@ -135,7 +135,6 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { client().admin() .indices() .preparePutMapping(index) - .setType("_doc") 
.setSource( jsonBuilder().startObject() .startObject("properties") @@ -147,20 +146,14 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { ) .get(); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); assertNotNull( - ((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get( - "integer_field" - ) + ((Map) (getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("integer_field") ); // make sure it was also written on red node although index is closed ImmutableOpenMap indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get( - "integer_field" - ) - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("integer_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); /* Try the same and see if this also works if node was just restarted. 
@@ -175,7 +168,6 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { client().admin() .indices() .preparePutMapping(index) - .setType("_doc") .setSource( jsonBuilder().startObject() .startObject("properties") @@ -187,18 +179,14 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { ) .get(); - getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); + getMappingsResponse = client().admin().indices().prepareGetMappings(index).get(); assertNotNull( - ((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get( - "float_field" - ) + ((Map) (getMappingsResponse.getMappings().get(index).getSourceAsMap().get("properties"))).get("float_field") ); // make sure it was also written on red node although index is closed indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field") - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("float_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); // finally check that meta data is also written of index opened again diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java index 624c2f1b51b9d..1e190d3bec345 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/QuorumGatewayIT.java @@ -66,11 +66,11 @@ public void testQuorumRecovery() throws Exception { final NumShards test = getNumShards("test"); logger.info("--> indexing..."); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", 
"value1").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); // We don't check for failures in the flush response: if we do we might get the following: // FlushNotAllowedEngineException[[test][1] recovery is in progress, flush [COMMIT_TRANSLOG] is not allowed] flush(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).get(); refresh(); for (int i = 0; i < 10; i++) { @@ -95,7 +95,8 @@ public void doAfterNodes(int numNodes, final Client activeClient) throws Excepti }, 30, TimeUnit.SECONDS); logger.info("--> one node is closed -- index 1 document into the remaining nodes"); - activeClient.prepareIndex("test", "type1", "3") + activeClient.prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("field", "value3").endObject()) .get(); assertNoFailures(activeClient.admin().indices().prepareRefresh().get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 217c422f9335f..3c5f2828ff94f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.Index; import org.opensearch.index.IndexService; @@ -115,34 +114,37 @@ public void testOneNodeRecoverFromGateway() throws Exception { String mapping = Strings.toString( 
XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("appAccountIds") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); - client().prepareIndex("test", "type1", "10990239") + client().prepareIndex("test") + .setId("10990239") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).value(179).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990473") + client().prepareIndex("test") + .setId("10990473") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990513") + client().prepareIndex("test") + .setId("10990513") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).value(179).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "10990695") + client().prepareIndex("test") + .setId("10990695") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); - client().prepareIndex("test", "type1", "11026351") + client().prepareIndex("test") + .setId("11026351") .setSource(jsonBuilder().startObject().startArray("appAccountIds").value(14).endArray().endObject()) .execute() .actionGet(); @@ -207,7 +209,6 @@ public void testSingleNodeNoFlush() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -217,14 +218,13 @@ public void testSingleNodeNoFlush() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // note: default replica settings are tied to #data nodes-1 which is 0 here. 
We can do with 1 in this test. int numberOfShards = numberOfShards(); assertAcked( prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards()).put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)) - ).addMapping("type1", mapping, XContentType.JSON) + ).setMapping(mapping) ); int value1Docs; @@ -309,12 +309,14 @@ public void testSingleNodeNoFlush() throws Exception { public void testSingleNodeWithFlush() throws Exception { internalCluster().startNode(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -352,12 +354,14 @@ public void testTwoNodeFirstNodeCleared() throws Exception { final String firstNode = internalCluster().startNode(); internalCluster().startNode(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -408,12 +412,14 @@ public void testLatestVersionLoaded() throws Exception { Settings node2DataPathSettings = internalCluster().dataPathSettings(nodes.get(1)); assertAcked(client().admin().indices().prepareCreate("test")); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("field", "value1").endObject()) .execute() .actionGet(); client().admin().indices().prepareFlush().execute().actionGet(); - client().prepareIndex("test", "type1", "2") + 
client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("field", "value2").endObject()) .execute() .actionGet(); @@ -433,7 +439,8 @@ public void testLatestVersionLoaded() throws Exception { internalCluster().stopRandomDataNode(); logger.info("--> one node is closed - start indexing data into the second one"); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("field", "value3").endObject()) .execute() .actionGet(); @@ -530,7 +537,7 @@ public void testReuseInFileBasedPeerRecovery() throws Exception { logger.info("--> indexing docs"); int numDocs = randomIntBetween(1, 1024); for (int i = 0; i < numDocs; i++) { - client(primaryNode).prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + client(primaryNode).prepareIndex("test").setSource("field", "value").execute().actionGet(); } client(primaryNode).admin().indices().prepareFlush("test").setForce(true).get(); @@ -563,7 +570,7 @@ public void testReuseInFileBasedPeerRecovery() throws Exception { public Settings onNodeStopped(String nodeName) throws Exception { // index some more documents; we expect to reuse the files that already exist on the replica for (int i = 0; i < moreDocs; i++) { - client(primaryNode).prepareIndex("test", "type").setSource("field", "value").execute().actionGet(); + client(primaryNode).prepareIndex("test").setSource("field", "value").execute().actionGet(); } // prevent a sequence-number-based recovery from being possible diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java index e9414fd651ca0..345ed668a3bf4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java @@ -113,7 +113,7 @@ 
public void testPreferCopyCanPerformNoopRecovery() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(100, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -123,7 +123,7 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 80)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } @@ -192,7 +192,7 @@ public void testRecentPrimaryInformation() throws Exception { false, randomBoolean(), IntStream.range(0, between(10, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeWithReplica)); @@ -202,7 +202,7 @@ public void testRecentPrimaryInformation() throws Exception { false, randomBoolean(), IntStream.range(0, between(10, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } @@ -231,7 +231,7 @@ public void testRecentPrimaryInformation() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(50, 200)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -288,7 +288,7 @@ public void testFullClusterRestartPerformNoopRecovery() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, 
between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -297,7 +297,7 @@ public void testFullClusterRestartPerformNoopRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 80)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); if (randomBoolean()) { @@ -350,7 +350,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -364,7 +364,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { false, randomBoolean(), IntStream.range(0, between(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); ensureActivePeerRecoveryRetentionLeasesAdvanced(indexName); @@ -376,7 +376,7 @@ public void testPreferCopyWithHighestMatchingOperations() throws Exception { false, randomBoolean(), IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); @@ -422,7 +422,7 @@ public void testDoNotCancelRecoveryForBrokenNode() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, 
"_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); client().admin().indices().prepareFlush(indexName).get(); @@ -474,7 +474,7 @@ public void testPeerRecoveryForClosedIndices() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(Collectors.toList()) ); ensureActivePeerRecoveryRetentionLeasesAdvanced(indexName); @@ -536,7 +536,7 @@ public void testSimulateRecoverySourceOnOldNode() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(200, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("f", "v")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("f", "v")) .collect(Collectors.toList()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index f8079aa1d93f3..ec0b47ccd0ecf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -90,25 +90,25 @@ public void testSimpleGet() { ); ensureGreen(); - GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + GetResponse response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); logger.info("--> non realtime get 1"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), 
"1").setRealtime(false).get(); assertThat(response.isExists(), equalTo(false)); logger.info("--> realtime get 1"); - response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime get 1 (no source, implicit)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields(Strings.EMPTY_ARRAY).get(); + response = client().prepareGet(indexOrAlias(), "1").setStoredFields(Strings.EMPTY_ARRAY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); Set fields = new HashSet<>(response.getFields().keySet()); @@ -116,7 +116,7 @@ public void testSimpleGet() { assertThat(response.getSourceAsBytes(), nullValue()); logger.info("--> realtime get 1 (no source, explicit)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setFetchSource(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setFetchSource(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); fields = new HashSet<>(response.getFields().keySet()); @@ -124,14 +124,14 @@ public void testSimpleGet() { assertThat(response.getSourceAsBytes(), nullValue()); logger.info("--> realtime get 1 (no type)"); - response = client().prepareGet(indexOrAlias(), null, "1").get(); + response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime fetch of 
field"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get(); + response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), nullValue()); @@ -139,7 +139,7 @@ public void testSimpleGet() { assertThat(response.getField("field2"), nullValue()); logger.info("--> realtime fetch of field & source"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").setFetchSource("field1", null).get(); + response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").setFetchSource("field1", null).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap(), hasKey("field1")); @@ -148,7 +148,7 @@ public void testSimpleGet() { assertThat(response.getField("field2"), nullValue()); logger.info("--> realtime get 1"); - response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1")); @@ -158,14 +158,14 @@ public void testSimpleGet() { refresh(); logger.info("--> non realtime get 1 (loaded from index)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); logger.info("--> realtime fetch of field (loaded from index)"); - response = 
client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get(); + response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), nullValue()); @@ -173,7 +173,7 @@ public void testSimpleGet() { assertThat(response.getField("field2"), nullValue()); logger.info("--> realtime fetch of field & source (loaded from index)"); - response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").setFetchSource(true).get(); + response = client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").setFetchSource(true).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsBytes(), not(nullValue())); @@ -181,28 +181,28 @@ public void testSimpleGet() { assertThat(response.getField("field2"), nullValue()); logger.info("--> update doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").get(); logger.info("--> realtime get 1"); - response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1")); logger.info("--> update doc 1 again"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_2", "field2", "value2_2").get(); - response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + response = 
client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2")); - DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - response = client().prepareGet(indexOrAlias(), "type1", "1").get(); + response = client().prepareGet(indexOrAlias(), "1").get(); assertThat(response.isExists(), equalTo(false)); } @@ -217,12 +217,12 @@ public void testGetWithAliasPointingToMultipleIndices() { } else { client().admin().indices().prepareCreate("index3").addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get(); } - IndexResponse indexResponse = client().prepareIndex("index1", "type", "id").setSource(Collections.singletonMap("foo", "bar")).get(); + IndexResponse indexResponse = client().prepareIndex("index1").setId("id").setSource(Collections.singletonMap("foo", "bar")).get(); assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus())); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareGet("alias1", "type", "_alias_id").get() + () -> client().prepareGet("alias1", "_alias_id").get() ); assertThat(exception.getMessage(), endsWith("can't execute a single index op")); } @@ -239,20 +239,20 @@ public void testSimpleMultiGet() throws Exception { ); ensureGreen(); - MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get(); + MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get(); assertThat(response.getResponses().length, equalTo(1)); assertThat(response.getResponses()[0].getResponse().isExists(), 
equalTo(false)); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } response = client().prepareMultiGet() - .add(indexOrAlias(), "type1", "1") - .add(indexOrAlias(), "type1", "15") - .add(indexOrAlias(), "type1", "3") - .add(indexOrAlias(), "type1", "9") - .add(indexOrAlias(), "type1", "11") + .add(indexOrAlias(), "1") + .add(indexOrAlias(), "15") + .add(indexOrAlias(), "3") + .add(indexOrAlias(), "9") + .add(indexOrAlias(), "11") .get(); assertThat(response.getResponses().length, equalTo(5)); assertThat(response.getResponses()[0].getId(), equalTo("1")); @@ -278,8 +278,8 @@ public void testSimpleMultiGet() throws Exception { // multi get with specific field response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").storedFields("field")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").storedFields("field")) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").storedFields("field")) + .add(new MultiGetRequest.Item(indexOrAlias(), "3").storedFields("field")) .get(); assertThat(response.getResponses().length, equalTo(2)); @@ -291,7 +291,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -299,21 +298,19 @@ public void testGetDocWithMultivaluedFields() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping1, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping1)); ensureGreen(); - GetResponse response = client().prepareGet("test", "type1", "1").get(); + GetResponse response = client().prepareGet("test", "1").get(); 
assertThat(response.isExists(), equalTo(false)); assertThat(response.isExists(), equalTo(false)); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); - response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); + response = client().prepareGet("test", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); - assertThat(response.getType(), equalTo("type1")); Set fields = new HashSet<>(response.getFields().keySet()); assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); @@ -322,7 +319,7 @@ public void testGetDocWithMultivaluedFields() throws Exception { // Now test values being fetched from stored fields. refresh(); - response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); + response = client().prepareGet("test", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); @@ -336,26 +333,26 @@ public void testGetWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); - GetResponse response = client().prepareGet("test", "type1", "1").get(); + GetResponse response = client().prepareGet("test", "1").get(); assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: - response = 
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), equalTo(1L)); - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(1).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), equalTo(1L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(2).get(); fail(); } catch (VersionConflictEngineException e) { // all good @@ -364,44 +361,44 @@ public void testGetWithVersion() { // From Lucene index: refresh(); - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(1L)); - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(1L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get(); fail(); } catch (VersionConflictEngineException e) { // all good } logger.info("--> index doc 
1 again, so increasing the version"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); // From translog: - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(2L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(1).get(); fail(); } catch (VersionConflictEngineException e) { // all good } - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(2).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); @@ -410,20 +407,20 @@ public void testGetWithVersion() { // From Lucene index: refresh(); - response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); assertThat(response.getVersion(), equalTo(2L)); try { - client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get(); + client().prepareGet(indexOrAlias(), "1").setVersion(1).setRealtime(false).get(); fail(); } catch (VersionConflictEngineException e) { // all good } - response = client().prepareGet(indexOrAlias(), "type1", 
"1").setVersion(2).setRealtime(false).get(); + response = client().prepareGet(indexOrAlias(), "1").setVersion(2).setRealtime(false).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getIndex(), equalTo("test")); @@ -434,19 +431,19 @@ public void testMultiGetWithVersion() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); - MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get(); + MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get(); assertThat(response.getResponses().length, equalTo(1)); assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default @@ -468,9 +465,9 @@ public void testMultiGetWithVersion() throws Exception { // Version from Lucene index refresh(); response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", 
"1").version(1)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1)) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2)) .setRealtime(false) .get(); assertThat(response.getResponses().length, equalTo(3)); @@ -489,14 +486,14 @@ public void testMultiGetWithVersion() throws Exception { assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class)); for (int i = 0; i < 3; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } // Version from translog response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2)) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY)) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1)) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(2)) .get(); assertThat(response.getResponses().length, equalTo(3)); // [0] version doesn't matter, which is the default @@ -518,9 +515,9 @@ public void testMultiGetWithVersion() throws Exception { // Version from Lucene index refresh(); response = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1)) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2)) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY)) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1)) + .add(new 
MultiGetRequest.Item(indexOrAlias(), "2").version(2)) .setRealtime(false) .get(); assertThat(response.getResponses().length, equalTo(3)); @@ -563,22 +560,20 @@ public void testGetFieldsNonLeafField() throws Exception { .setSettings(Settings.builder().put("index.refresh_interval", -1)) ); - client().prepareIndex("test", "my-type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .get(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, - () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get() + () -> client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get() ); assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); flush(); - exc = expectThrows( - IllegalArgumentException.class, - () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get() - ); + exc = expectThrows(IllegalArgumentException.class, () -> client().prepareGet(indexOrAlias(), "1").setStoredFields("field1").get()); assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); } @@ -644,18 +639,18 @@ public void testGetFieldsComplexField() throws Exception { logger.info("indexing documents"); - client().prepareIndex("my-index", "my-type", "1").setSource(source, XContentType.JSON).get(); + client().prepareIndex("my-index").setId("1").setSource(source, XContentType.JSON).get(); logger.info("checking real time retrieval"); String field = "field1.field2.field3.field4"; - GetResponse getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get(); + GetResponse getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), 
equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); - getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get(); + getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); @@ -680,7 +675,7 @@ public void testGetFieldsComplexField() throws Exception { logger.info("checking post-flush retrieval"); - getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get(); + getResponse = client().prepareGet("my-index", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); @@ -736,7 +731,7 @@ public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON)); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setRouting("routingValue").setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setRouting("routingValue").setSource("{}", XContentType.JSON).get(); String[] fieldsList = { "_routing" }; // before refresh - document is only in translog @@ -760,7 +755,7 @@ public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON)); ensureGreen(); String doc = "{\n" + " \"text\": \"some text.\"\n" + "}\n"; - client().prepareIndex("test", "_doc").setId("1").setSource(doc, XContentType.JSON).setRouting("1").get(); +
client().prepareIndex("test").setId("1").setSource(doc, XContentType.JSON).setRouting("1").get(); String[] fieldsList = { "_routing" }; // before refresh - document is only in translog assertGetFieldsAlwaysWorks(indexOrAlias(), "_doc", "1", fieldsList, "1"); @@ -891,7 +886,7 @@ protected void assertGetFieldNull(String index, String type, String docId, Strin } private GetResponse multiGetDocument(String index, String type, String docId, String field, @Nullable String routing) { - MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).storedFields(field); + MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, docId).storedFields(field); if (routing != null) { getItem.routing(routing); } @@ -902,7 +897,7 @@ private GetResponse multiGetDocument(String index, String type, String docId, St } private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) { - GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field); + GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setId(docId).setStoredFields(field); if (routing != null) { getRequestBuilder.setRouting(routing); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java index 84e1231a7b8b4..3b2695ad7896e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/FinalPipelineIT.java @@ -107,7 +107,7 @@ public void testFinalPipelineCantChangeDestination() { final IllegalStateException e = expectThrows( IllegalStateException.class, - () -> client().prepareIndex("index", "_doc").setId("1").setSource(Collections.singletonMap("field", "value")).get() + () -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", 
"value")).get() ); assertThat(e, hasToString(containsString("final pipeline [final_pipeline] can't change the target index"))); } @@ -128,7 +128,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { BytesReference finalPipelineBody = new BytesArray("{\"processors\": [{\"final\": {\"exists\":\"no_such_field\"}}]}"); client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -155,7 +155,7 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { BytesReference finalPipelineBody = new BytesArray("{\"processors\": [{\"final\": {}}]}"); client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -185,7 +185,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)) .actionGet(); - IndexResponse indexResponse = client().prepareIndex("index", "_doc") + IndexResponse indexResponse = client().prepareIndex("index") .setId("1") .setSource(Collections.singletonMap("field", "value")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -203,7 +203,7 @@ public void testFinalPipeline() { // this asserts that the final_pipeline was used, without us having to actually create the pipeline etc. 
final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareIndex("index", "_doc", "1").setSource(Collections.singletonMap("field", "value")).get() + () -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", "value")).get() ); assertThat(e, hasToString(containsString("pipeline with id [final_pipeline] does not exist"))); } @@ -218,13 +218,13 @@ public void testRequestPipelineAndFinalPipeline() { client().admin().cluster().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final Settings settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("index", settings); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setPipeline("request_pipeline"); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); final IndexResponse response = index.get(); assertThat(response.status(), equalTo(RestStatus.CREATED)); - final GetRequestBuilder get = client().prepareGet("index", "_doc", "1"); + final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); assertTrue(getResponse.isExists()); final Map source = getResponse.getSourceAsMap(); @@ -247,12 +247,12 @@ public void testDefaultAndFinalPipeline() { .put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline") .build(); createIndex("index", settings); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); final IndexResponse response = index.get(); assertThat(response.status(), 
equalTo(RestStatus.CREATED)); - final GetRequestBuilder get = client().prepareGet("index", "_doc", "1"); + final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); assertTrue(getResponse.isExists()); final Map source = getResponse.getSourceAsMap(); @@ -297,12 +297,12 @@ public void testDefaultAndFinalPipelineFromTemplates() { .setOrder(finalPipelineOrder) .setSettings(finalPipelineSettings) .get(); - final IndexRequestBuilder index = client().prepareIndex("index", "_doc", "1"); + final IndexRequestBuilder index = client().prepareIndex("index").setId("1"); index.setSource(Collections.singletonMap("field", "value")); index.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); final IndexResponse response = index.get(); assertThat(response.status(), equalTo(RestStatus.CREATED)); - final GetRequestBuilder get = client().prepareGet("index", "_doc", "1"); + final GetRequestBuilder get = client().prepareGet("index", "1"); final GetResponse getResponse = get.get(); assertTrue(getResponse.isExists()); final Map source = getResponse.getSourceAsMap(); @@ -337,7 +337,7 @@ public void testHighOrderFinalPipelinePreferred() throws IOException { // this asserts that the high_order_final_pipeline was selected, without us having to actually create the pipeline etc. 
final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareIndex("index", "_doc", "1").setSource(Collections.singletonMap("field", "value")).get() + () -> client().prepareIndex("index").setId("1").setSource(Collections.singletonMap("field", "value")).get() ); assertThat(e, hasToString(containsString("pipeline with id [high_order_final_pipeline] does not exist"))); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java index 9ee0347142c6e..54fbc8cecb967 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java @@ -63,7 +63,7 @@ public void testHiddenIndexSearch() { assertAcked( client().admin().indices().prepareCreate("hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get() ); - client().prepareIndex("hidden-index", "_doc").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); // default not visible to wildcard expansion SearchResponse searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index")) @@ -95,7 +95,7 @@ public void testHiddenIndexSearch() { .setSettings(Settings.builder().put("index.hidden", true).build()) .get() ); - client().prepareIndex(".hidden-index", "_doc").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); searchResponse = client().prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex())); 
assertTrue(matchedHidden); @@ -160,7 +160,7 @@ public void testGlobalTemplatesDoNotApply() { GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings("a_hidden_index").get(); assertThat(mappingsResponse.mappings().size(), is(1)); - MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index").get("_doc"); + MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index"); assertNotNull(mappingMetadata); Map propertiesMap = (Map) mappingMetadata.getSourceAsMap().get("properties"); assertNotNull(propertiesMap); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java index 75590686fdefe..9432f28a0a59e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexRequestBuilderIT.java @@ -53,13 +53,13 @@ public void testSetSource() throws InterruptedException, ExecutionException { Map map = new HashMap<>(); map.put("test_field", "foobar"); IndexRequestBuilder[] builders = new IndexRequestBuilder[] { - client().prepareIndex("test", "test").setSource((Object) "test_field", (Object) "foobar"), - client().prepareIndex("test", "test").setSource("{\"test_field\" : \"foobar\"}", XContentType.JSON), - client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), - client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), - client().prepareIndex("test", "test") + client().prepareIndex("test").setSource("test_field", "foobar"), + client().prepareIndex("test").setSource("{\"test_field\" : \"foobar\"}", XContentType.JSON), + client().prepareIndex("test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), + 
client().prepareIndex("test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}"), XContentType.JSON), + client().prepareIndex("test") .setSource(BytesReference.toBytes(new BytesArray("{\"test_field\" : \"foobar\"}")), XContentType.JSON), - client().prepareIndex("test", "test").setSource(map) }; + client().prepareIndex("test").setSource(map) }; indexRandom(true, builders); SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")).get(); OpenSearchAssertions.assertHitCount(searchResponse, builders.length); @@ -67,7 +67,7 @@ public void testSetSource() throws InterruptedException, ExecutionException { public void testOddNumberOfSourceObjects() { try { - client().prepareIndex("test", "test").setSource("test_field", "foobar", new Object()); + client().prepareIndex("test").setSource("test_field", "foobar", new Object()); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("The number of object passed must be even but was [3]")); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java index 2972cfd7015a1..c0bc9d29af992 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java @@ -94,7 +94,8 @@ public void testIndexSort() { .putList("index.sort.field", "date", "numeric_dv", "keyword_dv") ).addMapping("test", TEST_MAPPING).get(); for (int i = 0; i < 20; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("numeric_dv", randomInt(), "keyword_dv", randomAlphaOfLengthBetween(10, 20)) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java 
b/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java index 40e6819ee9f9a..e38b128c04fde 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/WaitUntilRefreshIT.java @@ -83,7 +83,8 @@ public void createTestIndex() { } public void testIndex() { - IndexResponse index = client().prepareIndex("test", "index", "1") + IndexResponse index = client().prepareIndex("test") + .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -94,11 +95,11 @@ public void testIndex() { public void testDelete() throws InterruptedException, ExecutionException { // Index normally - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Now delete with blockUntilRefresh - DeleteResponse delete = client().prepareDelete("test", "test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); + DeleteResponse delete = client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); assertEquals(DocWriteResponse.Result.DELETED, delete.getResult()); assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh()); assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); @@ -106,11 +107,11 @@ public void testDelete() throws InterruptedException, ExecutionException { public void testUpdate() throws InterruptedException, ExecutionException { // Index normally - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Update with 
RefreshPolicy.WAIT_UNTIL - UpdateResponse update = client().prepareUpdate("test", "test", "1") + UpdateResponse update = client().prepareUpdate("test", "1") .setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -119,7 +120,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")).get(), "1"); // Upsert with RefreshPolicy.WAIT_UNTIL - update = client().prepareUpdate("test", "test", "2") + update = client().prepareUpdate("test", "2") .setDocAsUpsert(true) .setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "cat") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -129,7 +130,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")).get(), "2"); // Update-becomes-delete with RefreshPolicy.WAIT_UNTIL - update = client().prepareUpdate("test", "test", "2") + update = client().prepareUpdate("test", "2") .setScript(new Script(ScriptType.INLINE, "mockscript", "delete_plz", emptyMap())) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .get(); @@ -141,25 +142,25 @@ public void testUpdate() throws InterruptedException, ExecutionException { public void testBulk() { // Index by bulk with RefreshPolicy.WAIT_UNTIL BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareIndex("test", "test", "1").setSource("foo", "bar")); + bulk.add(client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertBulkSuccess(bulk.get()); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get(), "1"); // Update by bulk with RefreshPolicy.WAIT_UNTIL bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareUpdate("test", "test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz")); + 
bulk.add(client().prepareUpdate("test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz")); assertBulkSuccess(bulk.get()); assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")).get(), "1"); // Delete by bulk with RefreshPolicy.WAIT_UNTIL bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareDelete("test", "test", "1")); + bulk.add(client().prepareDelete("test", "1")); assertBulkSuccess(bulk.get()); assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); // Update makes a noop bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - bulk.add(client().prepareDelete("test", "test", "1")); + bulk.add(client().prepareDelete("test", "1")); assertBulkSuccess(bulk.get()); } @@ -169,7 +170,8 @@ public void testBulk() { */ public void testNoRefreshInterval() throws InterruptedException, ExecutionException { client().admin().indices().prepareUpdateSettings("test").setSettings(singletonMap("index.refresh_interval", -1)).get(); - ActionFuture index = client().prepareIndex("test", "index", "1") + ActionFuture index = client().prepareIndex("test") + .setId("1") .setSource("foo", "bar") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .execute(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java index 47d7e974357d8..06ec4dc6d2812 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/engine/InternalEngineMergeIT.java @@ -71,7 +71,6 @@ public void testMergesHappening() throws Exception { for (int j = 0; j < numDocs; ++j) { request.add( Requests.indexRequest("test") - .type("type1") .id(Long.toString(id++)) .source(jsonBuilder().startObject().field("l", randomLong()).endObject()) ); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java index 95c03a306a897..da3b30030581f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java @@ -123,7 +123,7 @@ public void testMaxDocsLimit() throws Exception { assertThat(indexingResult.numSuccess, equalTo(0)); final IllegalArgumentException deleteError = expectThrows( IllegalArgumentException.class, - () -> client().prepareDelete("test", "_doc", "any-id").get() + () -> client().prepareDelete("test", "any-id").get() ); assertThat(deleteError.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]")); client().admin().indices().prepareRefresh("test").get(); @@ -206,7 +206,7 @@ static IndexingResult indexDocs(int numRequests, int numThreads) throws Exceptio phaser.arriveAndAwaitAdvance(); while (completedRequests.incrementAndGet() <= numRequests) { try { - final IndexResponse resp = client().prepareIndex("test", "_doc").setSource("{}", XContentType.JSON).get(); + final IndexResponse resp = client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); numSuccess.incrementAndGet(); assertThat(resp.status(), equalTo(RestStatus.CREATED)); } catch (IllegalArgumentException e) { diff --git a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java index a60b8241d5ea5..0aa2abed14b79 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java @@ -60,7 +60,7 @@ public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception { ); ensureGreen(); - 
client().prepareIndex("test", "type", "1").setSource("name", "name").get(); + client().prepareIndex("test").setId("1").setSource("name", "name").get(); client().admin().indices().prepareRefresh("test").get(); ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index 192be5f3c4369..f23e319a5e8d2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -36,7 +36,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -56,7 +55,7 @@ public void testDynamicTemplateCopyTo() throws Exception { int recordCount = between(1, 200); for (int i = 0; i < recordCount * 2; i++) { - client().prepareIndex("test-idx", "_doc", Integer.toString(i)).setSource("test_field", "test " + i, "even", i % 2 == 0).get(); + client().prepareIndex("test-idx").setId(Integer.toString(i)).setSource("test_field", "test " + i, "even", i % 2 == 0).get(); } client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); @@ -81,7 +80,6 @@ public void testDynamicTemplateCopyTo() throws Exception { public void testDynamicObjectCopyTo() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("_doc") .startObject("properties") .startObject("foo") .field("type", "text") @@ -89,10 +87,9 @@ public void testDynamicObjectCopyTo() throws 
Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(client().admin().indices().prepareCreate("test-idx").addMapping("_doc", mapping, XContentType.JSON)); - client().prepareIndex("test-idx", "_doc", "1").setSource("foo", "bar").get(); + assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(mapping)); + client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java index 28d92909a7f93..d5924155e2ec7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/DynamicMappingIT.java @@ -38,7 +38,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateUpdateTask; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -68,9 +67,9 @@ protected Collection> nodePlugins() { public void testConflictingDynamicMappings() { // we don't use indexRandom because the order of requests is important here createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("foo", 3).get(); + client().prepareIndex("index").setId("1").setSource("foo", 3).get(); try { - client().prepareIndex("index", "type", "2").setSource("foo", "bar").get(); + client().prepareIndex("index").setId("2").setSource("foo", "bar").get(); 
fail("Indexing request should have failed!"); } catch (MapperParsingException e) { // general case, the parsing code complains that it can't parse "bar" as a "long" @@ -86,19 +85,17 @@ public void testConflictingDynamicMappings() { public void testConflictingDynamicMappingsBulk() { // we don't use indexRandom because the order of requests is important here createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("foo", 3).get(); - BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "1").setSource("foo", 3)).get(); + client().prepareIndex("index").setId("1").setSource("foo", 3).get(); + BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("index").setId("1").setSource("foo", 3)).get(); assertFalse(bulkResponse.hasFailures()); - bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "2").setSource("foo", "bar")).get(); + bulkResponse = client().prepareBulk().add(client().prepareIndex("index").setId("2").setSource("foo", "bar")).get(); assertTrue(bulkResponse.hasFailures()); } - private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String type, String field) throws IOException { - ImmutableOpenMap indexMappings = mappings.getMappings().get("index"); + private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String field) throws IOException { + MappingMetadata indexMappings = mappings.getMappings().get("index"); assertNotNull(indexMappings); - MappingMetadata typeMappings = indexMappings.get(type); - assertNotNull(typeMappings); - Map typeMappingsMap = typeMappings.getSourceAsMap(); + Map typeMappingsMap = indexMappings.getSourceAsMap(); Map properties = (Map) typeMappingsMap.get("properties"); assertTrue("Could not find [" + field + "] in " + typeMappingsMap.toString(), properties.containsKey(field)); } @@ -117,7 +114,7 @@ public void run() { startLatch.await(); assertEquals( 
DocWriteResponse.Result.CREATED, - client().prepareIndex("index", "type", id).setSource("field" + id, "bar").get().getResult() + client().prepareIndex("index").setId(id).setSource("field" + id, "bar").get().getResult() ); } catch (Exception e) { error.compareAndSet(null, e); @@ -134,19 +131,19 @@ public void run() { throw error.get(); } Thread.sleep(2000); - GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get(); + GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").get(); for (int i = 0; i < indexThreads.length; ++i) { - assertMappingsHaveField(mappings, "index", "type", "field" + i); + assertMappingsHaveField(mappings, "index", "field" + i); } for (int i = 0; i < indexThreads.length; ++i) { - assertTrue(client().prepareGet("index", "type", Integer.toString(i)).get().isExists()); + assertTrue(client().prepareGet("index", Integer.toString(i)).get().isExists()); } } public void testPreflightCheckAvoidsMaster() throws InterruptedException { createIndex("index", Settings.builder().put(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 2).build()); ensureGreen("index"); - client().prepareIndex("index", MapperService.SINGLE_MAPPING_NAME).setId("1").setSource("field1", "value1").get(); + client().prepareIndex("index").setId("1").setSource("field1", "value1").get(); final CountDownLatch masterBlockedLatch = new CountDownLatch(1); final CountDownLatch indexingCompletedLatch = new CountDownLatch(1); @@ -167,9 +164,7 @@ public void onFailure(String source, Exception e) { }); masterBlockedLatch.await(); - final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index", MapperService.SINGLE_MAPPING_NAME) - .setId("2") - .setSource("field2", "value2"); + final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId("2").setSource("field2", "value2"); try { assertThat( expectThrows(IllegalArgumentException.class, () -> 
indexRequestBuilder.get(TimeValue.timeValueSeconds(10))).getMessage(), @@ -184,7 +179,7 @@ public void testMappingVersionAfterDynamicMappingUpdate() throws Exception { createIndex("test"); final ClusterService clusterService = internalCluster().clusterService(); final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); - client().prepareIndex("test", "_doc").setId("1").setSource("field", "text").get(); + client().prepareIndex("test").setId("1").setSource("field", "text").get(); assertBusy(() -> assertThat(clusterService.state().metadata().index("test").getMappingVersion(), equalTo(1 + previousVersion))); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java index 19a9265dc190c..37fa8cdd11a8b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java @@ -61,7 +61,7 @@ public void testMultiFields() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createTypeSource())); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); @@ -69,17 +69,17 @@ public void testMultiFields() throws Exception { assertThat(titleFields.get("not_analyzed"), notNullValue()); assertThat(((Map) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); - 
client().prepareIndex("my-index", "my-type", "1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title", "multi")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertAcked(client().admin().indices().preparePutMapping("my-index").setType("my-type").setSource(createPutMappingSource())); + assertAcked(client().admin().indices().preparePutMapping("my-index").setSource(createPutMappingSource())); getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); mappingSource = mappingMetadata.sourceAsMap(); assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2)); @@ -90,7 +90,7 @@ public void testMultiFields() throws Exception { assertThat(titleFields.get("uncased"), notNullValue()); assertThat(((Map) titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace")); - client().prepareIndex("my-index", "my-type", "1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -101,7 +101,7 @@ public void testGeoPointMultiField() throws Exception 
{ assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("geo_point"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -115,7 +115,7 @@ public void testGeoPointMultiField() throws Exception { assertThat(bField.get("type").toString(), equalTo("keyword")); GeoPoint point = new GeoPoint(51, 19); - client().prepareIndex("my-index", "my-type", "1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index") .setSize(0) .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS))) @@ -130,7 +130,7 @@ public void testCompletionMultiField() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("completion"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -142,7 +142,7 @@ public void testCompletionMultiField() throws Exception { assertThat(bField.size(), equalTo(1)); 
assertThat(bField.get("type").toString(), equalTo("keyword")); - client().prepareIndex("my-index", "my-type", "1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -152,7 +152,7 @@ public void testIpMultiField() throws Exception { assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("ip"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); - MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index").get("my-type"); + MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); Map mappingSource = mappingMetadata.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); @@ -164,7 +164,7 @@ public void testIpMultiField() throws Exception { assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); - client().prepareIndex("my-index", "my-type", "1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("my-index").setId("1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get(); SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -208,7 +208,6 @@ private XContentBuilder createTypeSource() throws IOException { private XContentBuilder createPutMappingSource() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("my-type") 
.startObject("properties") .startObject("title") .field("type", "text") @@ -220,7 +219,6 @@ private XContentBuilder createPutMappingSource() throws IOException { .endObject() .endObject() .endObject() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java b/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java index 1d30cef96e012..60e9e28b65005 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/query/plugin/CustomQueryParserIT.java @@ -54,7 +54,7 @@ public void setUp() throws Exception { super.setUp(); createIndex("test"); ensureGreen(); - client().prepareIndex("index", "type", "1").setSource("field", "value").get(); + client().prepareIndex("index").setId("1").setSource("field", "value").get(); refresh(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java index d4f4f79dc3408..6d76ee48a5b95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java @@ -82,8 +82,8 @@ public void testZeroTermsQuery() throws ExecutionException, InterruptedException private List getIndexRequests() { List requests = new ArrayList<>(); - requests.add(client().prepareIndex(INDEX, "band").setSource("name", "the beatles")); - requests.add(client().prepareIndex(INDEX, "band").setSource("name", "led zeppelin")); + requests.add(client().prepareIndex(INDEX).setSource("name", "the beatles")); + requests.add(client().prepareIndex(INDEX).setSource("name", "led zeppelin")); return requests; } } diff --git 
a/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java b/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java index 7ed7c36cb3449..ce7cb81dbd2df 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/seqno/GlobalCheckpointSyncIT.java @@ -82,7 +82,7 @@ public void testGlobalCheckpointSyncWithAsyncDurability() throws Exception { for (int j = 0; j < 10; j++) { final String id = Integer.toString(j); - client().prepareIndex("test", "test", id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); + client().prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); } assertBusy(() -> { @@ -194,7 +194,7 @@ private void runGlobalCheckpointSyncTest( } for (int j = 0; j < numberOfDocuments; j++) { final String id = Integer.toString(index * numberOfDocuments + j); - client().prepareIndex("test", "test", id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); + client().prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); } try { barrier.await(); @@ -251,7 +251,7 @@ public void testPersistGlobalCheckpoint() throws Exception { } int numDocs = randomIntBetween(1, 20); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "test", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } ensureGreen("test"); assertBusy(() -> { @@ -281,7 +281,7 @@ public void testPersistLocalCheckpoint() { logger.info("numDocs {}", numDocs); long maxSeqNo = 0; for (int i = 0; i < numDocs; i++) { - maxSeqNo = client().prepareIndex("test", "_doc").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get().getSeqNo(); + maxSeqNo = 
client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get().getSeqNo(); logger.info("got {}", maxSeqNo); } for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) { diff --git a/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java b/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java index e1c56129c9f4b..ed6074b39c8a7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/seqno/RetentionLeaseIT.java @@ -43,7 +43,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; -import org.opensearch.index.engine.Engine; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.IndicesService; @@ -122,7 +121,7 @@ public void testRetentionLeasesSyncedOnAdd() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final ActionListener listener = countDownLatchListener(latch); // simulate a peer recovery which locks the soft deletes policy on the primary - final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {}; + final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {}; currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener)); latch.await(); retentionLock.close(); @@ -175,7 +174,7 @@ public void testRetentionLeaseSyncedOnRemove() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final ActionListener listener = countDownLatchListener(latch); // simulate a peer recovery which locks the soft deletes policy on the primary - final Closeable retentionLock = randomBoolean() ? 
primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {}; + final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {}; currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener)); latch.await(); retentionLock.close(); @@ -186,7 +185,7 @@ public void testRetentionLeaseSyncedOnRemove() throws Exception { final CountDownLatch latch = new CountDownLatch(1); primary.removeRetentionLease(id, countDownLatchListener(latch)); // simulate a peer recovery which locks the soft deletes policy on the primary - final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock(Engine.HistorySource.INDEX) : () -> {}; + final Closeable retentionLock = randomBoolean() ? primary.acquireHistoryRetentionLock() : () -> {}; currentRetentionLeases.remove(id); latch.await(); retentionLock.close(); @@ -346,22 +345,16 @@ public void testBackgroundRetentionLeaseSync() throws Exception { ) ); } - assertBusy( - () -> { - // check all retention leases have been synced to all replicas - for (final ShardRouting replicaShard : clusterService().state() - .routingTable() - .index("index") - .shard(0) - .replicaShards()) { - final String replicaShardNodeId = replicaShard.currentNodeId(); - final String replicaShardNodeName = clusterService().state().nodes().get(replicaShardNodeId).getName(); - final IndexShard replica = internalCluster().getInstance(IndicesService.class, replicaShardNodeName) - .getShardOrNull(new ShardId(resolveIndex("index"), 0)); - assertThat(replica.getRetentionLeases(), equalTo(primary.getRetentionLeases())); - } + assertBusy(() -> { + // check all retention leases have been synced to all replicas + for (final ShardRouting replicaShard : clusterService().state().routingTable().index("index").shard(0).replicaShards()) { + final String replicaShardNodeId = replicaShard.currentNodeId(); + final String replicaShardNodeName = 
clusterService().state().nodes().get(replicaShardNodeId).getName(); + final IndexShard replica = internalCluster().getInstance(IndicesService.class, replicaShardNodeName) + .getShardOrNull(new ShardId(resolveIndex("index"), 0)); + assertThat(replica.getRetentionLeases(), equalTo(primary.getRetentionLeases())); } - ); + }); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java index 6fbf218ae0542..a2c5c0333bbfe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/GlobalCheckpointListenersIT.java @@ -88,7 +88,7 @@ public void accept(final long g, final Exception e) { } }, null); - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); assertBusy(() -> assertThat(globalCheckpoint.get(), equalTo((long) index))); // adding a listener expecting a lower global checkpoint should fire immediately final AtomicLong immediateGlobalCheckpint = new AtomicLong(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 28f5eb023b187..efc522a1f9741 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -74,6 +74,7 @@ import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.NoOpEngine; import org.opensearch.index.flush.FlushStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.seqno.RetentionLeaseSyncer; 
import org.opensearch.index.seqno.SequenceNumbers; @@ -172,7 +173,7 @@ public void testLockTryingToDelete() throws Exception { public void testDurableFlagHasEffect() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "bar", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); @@ -192,20 +193,20 @@ public void testDurableFlagHasEffect() throws Exception { setDurability(shard, Translog.Durability.REQUEST); assertFalse(needsSync.test(translog)); setDurability(shard, Translog.Durability.ASYNC); - client().prepareIndex("test", "bar", "2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); assertTrue(needsSync.test(translog)); setDurability(shard, Translog.Durability.REQUEST); - client().prepareDelete("test", "bar", "1").get(); + client().prepareDelete("test", "1").get(); assertFalse(needsSync.test(translog)); setDurability(shard, Translog.Durability.ASYNC); - client().prepareDelete("test", "bar", "2").get(); + client().prepareDelete("test", "2").get(); assertTrue(translog.syncNeeded()); setDurability(shard, Translog.Durability.REQUEST); assertNoFailures( client().prepareBulk() - .add(client().prepareIndex("test", "bar", "3").setSource("{}", XContentType.JSON)) - .add(client().prepareDelete("test", "bar", "1")) + .add(client().prepareIndex("test").setId("3").setSource("{}", XContentType.JSON)) + .add(client().prepareDelete("test", "1")) .get() ); assertFalse(needsSync.test(translog)); @@ -213,8 +214,8 @@ public void testDurableFlagHasEffect() throws Exception { setDurability(shard, Translog.Durability.ASYNC); assertNoFailures( client().prepareBulk() - .add(client().prepareIndex("test", "bar", 
"4").setSource("{}", XContentType.JSON)) - .add(client().prepareDelete("test", "bar", "3")) + .add(client().prepareIndex("test").setId("4").setSource("{}", XContentType.JSON)) + .add(client().prepareDelete("test", "3")) .get() ); setDurability(shard, Translog.Durability.REQUEST); @@ -251,7 +252,7 @@ public void testIndexDirIsDeletedWhenShardRemoved() throws Exception { Settings idxSettings = Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, idxPath).build(); createIndex("test", idxSettings); ensureGreen("test"); - client().prepareIndex("test", "bar", "1").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); SearchResponse response = client().prepareSearch("test").get(); assertHitCount(response, 1L); client().admin().indices().prepareDelete("test").get(); @@ -267,7 +268,7 @@ public void testExpectedShardSizeIsPresent() throws InterruptedException { .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) ); for (int i = 0; i < 50; i++) { - client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setSource("{}", XContentType.JSON).get(); } ensureGreen("test"); InternalClusterInfoService clusterInfoService = (InternalClusterInfoService) getInstanceFromNode(ClusterInfoService.class); @@ -286,7 +287,7 @@ public void testIndexCanChangeCustomDataPath() throws Exception { logger.info("--> creating index [{}] with data_path [{}]", index, indexDataPath); createIndex(index, Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, indexDataPath.toAbsolutePath().toString()).build()); - client().prepareIndex(index, "bar", "1").setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex(index).setId("1").setSource("foo", "bar").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(index); 
assertHitCount(client().prepareSearch(index).setSize(0).get(), 1L); @@ -365,7 +366,7 @@ public void testMaybeFlush() throws Exception { .build() ) .get(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("0") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE) @@ -374,7 +375,7 @@ public void testMaybeFlush() throws Exception { shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("test", "_doc", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -384,7 +385,8 @@ public void testMaybeFlush() throws Exception { final Translog translog = getTranslog(shard); assertEquals(2, translog.stats().getUncommittedOperations()); assertThat(shard.flushStats().getTotal(), equalTo(0L)); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE) .get(); @@ -413,7 +415,7 @@ public void testMaybeFlush() throws Exception { .build() ) .get(); - client().prepareDelete("test", "_doc", "2").get(); + client().prepareDelete("test", "2").get(); logger.info( "--> translog size after delete: [{}] num_ops [{}] generation [{}]", translog.stats().getUncommittedSizeInBytes(), @@ -444,7 +446,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { .put("index.number_of_shards", 1) .put("index.translog.generation_threshold_size", generationThreshold + "b") .build(); - createIndex("test", settings, "test"); + createIndex("test", settings, MapperService.SINGLE_MAPPING_NAME); ensureGreen("test"); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); final IndexService test = indicesService.indexService(resolveIndex("test")); @@ -458,7 +460,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { final Engine.IndexResult result = shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("test", "test", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -492,7 +494,8 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { settings = Settings.builder().put("index.translog.generation_threshold_size", "117b").build(); } client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); - client().prepareIndex("test", "test", "0") + client().prepareIndex("test") + .setId("0") .setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE) .get(); @@ -518,7 +521,7 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { final CheckedRunnable check; if (flush) { final FlushStats initialStats = shard.flushStats(); - client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); check = () -> { assertFalse(shard.shouldPeriodicallyFlush()); final FlushStats currentStats = shard.flushStats(); @@ -543,7 +546,7 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { }; } else { final long generation = getTranslog(shard).currentFileGeneration(); - client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); check = () -> { assertFalse(shard.shouldRollTranslogGeneration()); assertEquals(generation + 1, getTranslog(shard).currentFileGeneration()); @@ -564,7 +567,7 @@ public void testFlushStats() throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); final int numDocs = between(10, 100); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } // A flush stats may include the new total count but the old period count - assert eventually. 
assertBusy(() -> { @@ -575,7 +578,7 @@ public void testFlushStats() throws Exception { settings = Settings.builder().put("index.translog.flush_threshold_size", (String) null).build(); client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); - client().prepareIndex("test", "doc", UUIDs.randomBase64UUID()).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(UUIDs.randomBase64UUID()).setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").setForce(randomBoolean()).setWaitIfOngoing(true).get(); final FlushStats flushStats = client().admin().indices().prepareStats("test").clear().setFlush(true).get().getTotal().flush; assertThat(flushStats.getTotal(), greaterThan(flushStats.getPeriodic())); @@ -587,9 +590,9 @@ public void testShardHasMemoryBufferOnTranslogRecover() throws Throwable { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexService(resolveIndex("test")); IndexShard shard = indexService.getShardOrNull(0); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); - client().prepareDelete("test", "test", "0").get(); - client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareDelete("test", "0").get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); CheckedFunction wrapper = directoryReader -> directoryReader; shard.close("simon says", false); @@ -701,7 +704,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti final SearchRequest countRequest = new SearchRequest("test").source(new SearchSourceBuilder().size(0)); final long numDocs = between(10, 20); for (int 
i = 0; i < numDocs; i++) { - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); if (randomBoolean()) { shard.refresh("test"); } @@ -723,7 +726,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti final long moreDocs = between(10, 20); for (int i = 0; i < moreDocs; i++) { - client().prepareIndex("test", "_doc", Long.toString(i + numDocs)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Long.toString(i + numDocs)).setSource("{}", XContentType.JSON).get(); if (randomBoolean()) { shard.refresh("test"); } @@ -754,16 +757,14 @@ public void testShardChangesWithDefaultDocType() throws Exception { int numOps = between(1, 10); for (int i = 0; i < numOps; i++) { if (randomBoolean()) { - client().prepareIndex("index", randomFrom("_doc", "user_doc"), randomFrom("1", "2")) - .setSource("{}", XContentType.JSON) - .get(); + client().prepareIndex("index").setId(randomFrom("1", "2")).setSource("{}", XContentType.JSON).get(); } else { - client().prepareDelete("index", randomFrom("_doc", "user_doc"), randomFrom("1", "2")).get(); + client().prepareDelete("index", randomFrom("1", "2")).get(); } } IndexShard shard = indexService.getShard(0); try ( - Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true); + Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true, randomBoolean()); Translog.Snapshot translogSnapshot = getTranslog(shard).newSnapshot() ) { List opsFromLucene = TestTranslog.drainSnapshot(luceneSnapshot, true); @@ -819,7 +820,7 @@ public void testLimitNumberOfRetainedTranslogFiles() throws Exception { } }; for (int i = 0; i < 100; i++) { - client().prepareIndex(indexName, "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + 
client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); if (randomInt(100) < 10) { client().admin().indices().prepareFlush(indexName).setWaitIfOngoing(true).get(); checkTranslog.run(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 6f2964769f2a7..2dc241e278768 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -150,7 +150,7 @@ public void testCorruptIndex() throws Exception { final int numExtraDocs = between(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numExtraDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } numDocs += numExtraDocs; @@ -326,7 +326,7 @@ public void testCorruptTranslogTruncation() throws Exception { logger.info("--> indexing [{}] docs to be kept", numDocsToKeep); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); flush(indexName); @@ -337,7 +337,7 @@ public void testCorruptTranslogTruncation() throws Exception { logger.info("--> indexing [{}] more doc to be truncated", numDocsToTruncate); builders = new IndexRequestBuilder[numDocsToTruncate]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = 
client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); @@ -529,7 +529,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { logger.info("--> indexing [{}] docs to be kept", numDocsToKeep); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocsToKeep]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); flush(indexName); @@ -539,7 +539,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { logger.info("--> indexing [{}] more docs to be truncated", numDocsToTruncate); builders = new IndexRequestBuilder[numDocsToTruncate]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(indexName, "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); final int totalDocs = numDocsToKeep + numDocsToTruncate; diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java index 21d56ef53c26c..9382960b906e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/SearchIdleIT.java @@ -102,7 +102,7 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter int numDocs = scaledRandomIntBetween(25, 100); totalNumDocs.set(numDocs); CountDownLatch indexingDone = new CountDownLatch(numDocs); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", 
XContentType.JSON).get(); indexingDone.countDown(); // one doc is indexed above blocking IndexShard shard = indexService.getShard(0); boolean hasRefreshed = shard.scheduledRefresh(); @@ -133,7 +133,8 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter started.await(); assertThat(count.applyAsLong(totalNumDocs.get()), equalTo(1L)); for (int i = 1; i < numDocs; i++) { - client().prepareIndex("test", "test", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("{\"foo\" : \"bar\"}", XContentType.JSON) .execute(new ActionListener() { @Override @@ -158,7 +159,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { IndexService indexService = createIndex("test", builder.build()); assertFalse(indexService.getIndexSettings().isExplicitRefresh()); ensureGreen(); - client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); IndexShard shard = indexService.getShard(0); assertFalse(shard.scheduledRefresh()); assertTrue(shard.isSearchIdle()); @@ -166,7 +167,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { client().admin().indices().prepareRefresh().execute(ActionListener.wrap(refreshLatch::countDown));// async on purpose to make sure // it happens concurrently assertHitCount(client().prepareSearch().get(), 1); - client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); assertFalse(shard.scheduledRefresh()); assertTrue(shard.hasRefreshPending()); @@ -185,7 +186,7 @@ public void testPendingRefreshWithIntervalChange() throws Exception { // We need to ensure a `scheduledRefresh` triggered by the internal refresh setting update is executed before we index a new doc; // otherwise, it will compete to call 
`Engine#maybeRefresh` with the `scheduledRefresh` that we are going to verify. ensureNoPendingScheduledRefresh(indexService.getThreadPool()); - client().prepareIndex("test", "test", "2").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); assertTrue(shard.scheduledRefresh()); assertFalse(shard.hasRefreshPending()); assertTrue(shard.isSearchIdle()); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java index ece7add2b6937..3a5e21fc8ef65 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java @@ -177,7 +177,7 @@ public void testCorruptFileAndRecover() throws ExecutionException, InterruptedEx disableAllocation("test"); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -295,7 +295,7 @@ public void testCorruptPrimaryNoReplica() throws ExecutionException, Interrupted ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -456,7 +456,7 @@ public void testCorruptionOnNetworkLayer() throws ExecutionException, Interrupte ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = 
client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -561,7 +561,7 @@ public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, I ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); @@ -643,7 +643,7 @@ public void testReplicaCorruption() throws Exception { ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("field", "value"); + builders[i] = client().prepareIndex("test").setSource("field", "value"); } indexRandom(true, builders); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java index 5f39002ac6625..1dd0f6a3d664e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java @@ -88,7 +88,7 @@ public void testCorruptTranslogFiles() throws Exception { // Index some documents IndexRequestBuilder[] builders = new IndexRequestBuilder[scaledRandomIntBetween(100, 1000)]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar"); + builders[i] = client().prepareIndex("test").setSource("foo", "bar"); } indexRandom(false, false, false, Arrays.asList(builders)); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java 
b/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java index 9fe0596357034..3e2091b2065e5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/ExceptionRetryIT.java @@ -127,7 +127,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I for (int i = 0; i < numDocs; i++) { XContentBuilder doc = null; doc = jsonBuilder().startObject().field("foo", "bar").endObject(); - bulkBuilder.add(client.prepareIndex("index", "type").setSource(doc)); + bulkBuilder.add(client.prepareIndex("index").setSource(doc)); } BulkResponse response = bulkBuilder.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java index ffc8e74875c58..45fbb2651a96d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indexing/IndexActionIT.java @@ -72,7 +72,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { logger.info("indexing [{}] docs", numOfDocs); List builders = new ArrayList<>(numOfDocs); for (int j = 0; j < numOfDocs; j++) { - builders.add(client().prepareIndex("test", "type").setSource("field", "value_" + j)); + builders.add(client().prepareIndex("test").setSource("field", "value_" + j)); } indexRandom(true, builders); logger.info("verifying indexed content"); @@ -99,7 +99,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { } try { logger.debug("running search with a specific type"); - SearchResponse response = client().prepareSearch("test").setTypes("type").get(); + SearchResponse response = client().prepareSearch("test").get(); if (response.getHits().getTotalHits().value != numOfDocs) { final String message = "Count is " + response.getHits().getTotalHits().value @@ -128,15 
+128,15 @@ public void testCreatedFlag() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); - client().prepareDelete("test", "type", "1").execute().actionGet(); + client().prepareDelete("test", "1").execute().actionGet(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } @@ -145,14 +145,14 @@ public void testCreatedFlagWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - client().prepareDelete("test", "type", "1").execute().actionGet(); + client().prepareDelete("test", "1").execute().actionGet(); flush(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").execute().actionGet(); 
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } @@ -194,7 +194,8 @@ public void testCreatedFlagWithExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(123) .setVersionType(VersionType.EXTERNAL) @@ -208,7 +209,7 @@ public void testCreateFlagWithBulk() { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")) + .add(client().prepareIndex("test").setId("1").setSource("field1", "value1_1")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); @@ -232,7 +233,7 @@ public void testCreateIndexWithLongName() { } try { - client().prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); + client().prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT)).setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); } catch (InvalidIndexNameException e) { assertThat( @@ -247,8 +248,7 @@ public void testCreateIndexWithLongName() { client().prepareIndex( randomAlphaOfLength(MetadataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + "Ϟ".toLowerCase( Locale.ROOT - ), - "mytype" + ) ).setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); } catch (InvalidIndexNameException e) { @@ -290,7 +290,7 @@ public void testInvalidIndexName() { public void testDocumentWithBlankFieldName() { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> { client().prepareIndex("test", "type", "1").setSource("", "value1_2").execute().actionGet(); } + () -> { client().prepareIndex("test").setId("1").setSource("", 
"value1_2").execute().actionGet(); } ); assertThat(e.getMessage(), containsString("failed to parse")); assertThat(e.getRootCause().getMessage(), containsString("field name cannot be an empty string")); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java index ec90d271b9127..7236c32697384 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -71,31 +71,31 @@ public void testIndexNameDateMathExpressions() { String dateMathExp1 = "<.marvel-{now/d}>"; String dateMathExp2 = "<.marvel-{now/d-1d}>"; String dateMathExp3 = "<.marvel-{now/d-2d}>"; - client().prepareIndex(dateMathExp1, "type", "1").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp2, "type", "2").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp3, "type", "3").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp1).setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp2).setId("2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); assertHitCount(searchResponse, 3); assertSearchHits(searchResponse, "1", "2", "3"); - GetResponse getResponse = client().prepareGet(dateMathExp1, "type", "1").get(); + GetResponse getResponse = client().prepareGet(dateMathExp1, "1").get(); assertThat(getResponse.isExists(), is(true)); assertThat(getResponse.getId(), equalTo("1")); - getResponse = client().prepareGet(dateMathExp2, "type", "2").get(); + getResponse = 
client().prepareGet(dateMathExp2, "2").get(); assertThat(getResponse.isExists(), is(true)); assertThat(getResponse.getId(), equalTo("2")); - getResponse = client().prepareGet(dateMathExp3, "type", "3").get(); + getResponse = client().prepareGet(dateMathExp3, "3").get(); assertThat(getResponse.isExists(), is(true)); assertThat(getResponse.getId(), equalTo("3")); MultiGetResponse mgetResponse = client().prepareMultiGet() - .add(dateMathExp1, "type", "1") - .add(dateMathExp2, "type", "2") - .add(dateMathExp3, "type", "3") + .add(dateMathExp1, "1") + .add(dateMathExp2, "2") + .add(dateMathExp3, "3") .get(); assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true)); assertThat(mgetResponse.getResponses()[0].getResponse().getId(), equalTo("1")); @@ -109,15 +109,15 @@ public void testIndexNameDateMathExpressions() { assertThat(indicesStatsResponse.getIndex(index2), notNullValue()); assertThat(indicesStatsResponse.getIndex(index3), notNullValue()); - DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "type", "1").get(); + DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "1").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("1")); - deleteResponse = client().prepareDelete(dateMathExp2, "type", "2").get(); + deleteResponse = client().prepareDelete(dateMathExp2, "2").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("2")); - deleteResponse = client().prepareDelete(dateMathExp3, "type", "3").get(); + deleteResponse = client().prepareDelete(dateMathExp3, "3").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("3")); } @@ -131,9 +131,9 @@ public void testAutoCreateIndexWithDateMathExpression() throws Exception { String dateMathExp1 = "<.marvel-{now/d}>"; String dateMathExp2 = "<.marvel-{now/d-1d}>"; 
String dateMathExp3 = "<.marvel-{now/d-2d}>"; - client().prepareIndex(dateMathExp1, "type", "1").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp2, "type", "2").setSource("{}", XContentType.JSON).get(); - client().prepareIndex(dateMathExp3, "type", "3").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp1).setId("1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp2).setId("2").setSource("{}", XContentType.JSON).get(); + client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java index 4d91453c296b6..0d3c685ab0327 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndexingMemoryControllerIT.java @@ -121,14 +121,14 @@ public void testDeletesAloneCanTriggerRefresh() throws Exception { ); IndexShard shard = indexService.getShard(0); for (int i = 0; i < 100; i++) { - client().prepareIndex("index", "_doc").setId(Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("index").setId(Integer.toString(i)).setSource("field", "value").get(); } // Force merge so we know all merges are done before we start deleting: ForceMergeResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); assertNoFailures(r); final RefreshStats refreshStats = shard.refreshStats(); for (int i = 0; i < 100; i++) { - client().prepareDelete("index", "_doc", Integer.toString(i)).get(); + client().prepareDelete("index", Integer.toString(i)).get(); } // need to assert busily as IndexingMemoryController 
refreshes in background assertBusy(() -> assertThat(shard.refreshStats().getTotal(), greaterThan(refreshStats.getTotal() + 1))); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java index ef3a45e6755f7..3432cc967bf22 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesOptionsIntegrationIT.java @@ -361,7 +361,7 @@ public void testWildcardBehaviour() throws Exception { verify(getSettings(indices).setIndicesOptions(options), false); assertAcked(prepareCreate("foobar")); - client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("foobar").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); // Verify defaults for wildcards, with one wildcard expression and one existing index indices = new String[] { "foo*" }; @@ -455,7 +455,7 @@ public void testWildcardBehaviourSnapshotRestore() throws Exception { public void testAllMissingLenient() throws Exception { createIndex("test1"); - client().prepareIndex("test1", "type", "1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test1").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); SearchResponse response = client().prepareSearch("test2") .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(matchAllQuery()) @@ -595,34 +595,34 @@ public void testPutAliasWildcard() throws Exception { } public void testPutMapping() throws Exception { - verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), true); - verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true); + 
verify(client().admin().indices().preparePutMapping("foo").setSource("field", "type=text"), true); + verify(client().admin().indices().preparePutMapping("_all").setSource("field", "type=text"), true); for (String index : Arrays.asList("foo", "foobar", "bar", "barbaz")) { assertAcked(prepareCreate(index)); } - verify(client().admin().indices().preparePutMapping("foo").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping("b*").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping("_all").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - verify(client().admin().indices().preparePutMapping().setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type"), notNullValue()); - 
assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type"), notNullValue()); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); - - verify(client().admin().indices().preparePutMapping("c*").setType("type").setSource("field", "type=text"), true); + verify(client().admin().indices().preparePutMapping("foo").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + verify(client().admin().indices().preparePutMapping("b*").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + verify(client().admin().indices().preparePutMapping("_all").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + verify(client().admin().indices().preparePutMapping().setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar"), notNullValue()); + assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar"), notNullValue()); + 
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); + + verify(client().admin().indices().preparePutMapping("c*").setSource("field", "type=text"), true); assertAcked(client().admin().indices().prepareClose("barbaz").get()); - verify(client().admin().indices().preparePutMapping("barbaz").setType("type").setSource("field", "type=text"), false); - assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type"), notNullValue()); + verify(client().admin().indices().preparePutMapping("barbaz").setSource("field", "type=text"), false); + assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz"), notNullValue()); } public static final class TestPlugin extends Plugin { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 1724ec6beafdf..18940cba80799 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -79,8 +79,8 @@ public void testCacheAggs() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"), - client.prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z") + client.prepareIndex("index").setSource("f", "2014-03-10T00:00:00.000Z"), + client.prepareIndex("index").setSource("f", "2014-05-13T00:00:00.000Z") ); ensureSearchable("index"); @@ -149,15 +149,15 @@ public void testQueryRewrite() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", 
"3").setRouting("1").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"), - client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"), - client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setRouting("1").setSource("s", "2016-03-19"), + client.prepareIndex("index").setId("2").setRouting("1").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setRouting("1").setSource("s", "2016-03-21"), + client.prepareIndex("index").setId("4").setRouting("2").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setRouting("2").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setRouting("2").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setRouting("3").setSource("s", "2016-03-25"), + client.prepareIndex("index").setId("8").setRouting("3").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setRouting("3").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -219,15 +219,15 @@ public void testQueryRewriteMissingValues() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", "3").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setSource("s", 
"2016-03-24"), - client.prepareIndex("index", "type", "7").setSource("other", "value"), - client.prepareIndex("index", "type", "8").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setSource("s", "2016-03-19"), + client.prepareIndex("index").setId("2").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setSource("s", "2016-03-21"), + client.prepareIndex("index").setId("4").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setSource("other", "value"), + client.prepareIndex("index").setId("8").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -285,15 +285,15 @@ public void testQueryRewriteDates() throws Exception { ); indexRandom( true, - client.prepareIndex("index", "type", "1").setSource("d", "2014-01-01T00:00:00"), - client.prepareIndex("index", "type", "2").setSource("d", "2014-02-01T00:00:00"), - client.prepareIndex("index", "type", "3").setSource("d", "2014-03-01T00:00:00"), - client.prepareIndex("index", "type", "4").setSource("d", "2014-04-01T00:00:00"), - client.prepareIndex("index", "type", "5").setSource("d", "2014-05-01T00:00:00"), - client.prepareIndex("index", "type", "6").setSource("d", "2014-06-01T00:00:00"), - client.prepareIndex("index", "type", "7").setSource("d", "2014-07-01T00:00:00"), - client.prepareIndex("index", "type", "8").setSource("d", "2014-08-01T00:00:00"), - client.prepareIndex("index", "type", "9").setSource("d", "2014-09-01T00:00:00") + client.prepareIndex("index").setId("1").setSource("d", "2014-01-01T00:00:00"), + client.prepareIndex("index").setId("2").setSource("d", "2014-02-01T00:00:00"), + 
client.prepareIndex("index").setId("3").setSource("d", "2014-03-01T00:00:00"), + client.prepareIndex("index").setId("4").setSource("d", "2014-04-01T00:00:00"), + client.prepareIndex("index").setId("5").setSource("d", "2014-05-01T00:00:00"), + client.prepareIndex("index").setId("6").setSource("d", "2014-06-01T00:00:00"), + client.prepareIndex("index").setId("7").setSource("d", "2014-07-01T00:00:00"), + client.prepareIndex("index").setId("8").setSource("d", "2014-08-01T00:00:00"), + client.prepareIndex("index").setId("9").setSource("d", "2014-09-01T00:00:00") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -352,15 +352,15 @@ public void testQueryRewriteDatesWithNow() throws Exception { DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); indexRandom( true, - client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)), - client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))), - client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))), - client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))), - client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))), - client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))), - client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))), - client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))), - client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8))) + client.prepareIndex("index-1").setId("1").setSource("d", formatter.format(now)), + client.prepareIndex("index-1").setId("2").setSource("d", formatter.format(now.minusDays(1))), + client.prepareIndex("index-1").setId("3").setSource("d", formatter.format(now.minusDays(2))), + 
client.prepareIndex("index-2").setId("4").setSource("d", formatter.format(now.minusDays(3))), + client.prepareIndex("index-2").setId("5").setSource("d", formatter.format(now.minusDays(4))), + client.prepareIndex("index-2").setId("6").setSource("d", formatter.format(now.minusDays(5))), + client.prepareIndex("index-3").setId("7").setSource("d", formatter.format(now.minusDays(6))), + client.prepareIndex("index-3").setId("8").setSource("d", formatter.format(now.minusDays(7))), + client.prepareIndex("index-3").setId("9").setSource("d", formatter.format(now.minusDays(8))) ); ensureSearchable("index-1", "index-2", "index-3"); assertCacheState(client, "index-1", 0, 0); @@ -429,15 +429,15 @@ public void testCanCache() throws Exception { assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date").setSettings(settings).get()); indexRandom( true, - client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"), - client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"), - client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"), - client.prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"), - client.prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"), - client.prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"), - client.prepareIndex("index", "type", "7").setRouting("3").setSource("s", "2016-03-25"), - client.prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"), - client.prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27") + client.prepareIndex("index").setId("1").setRouting("1").setSource("s", "2016-03-19"), + client.prepareIndex("index").setId("2").setRouting("1").setSource("s", "2016-03-20"), + client.prepareIndex("index").setId("3").setRouting("1").setSource("s", "2016-03-21"), + 
client.prepareIndex("index").setId("4").setRouting("2").setSource("s", "2016-03-22"), + client.prepareIndex("index").setId("5").setRouting("2").setSource("s", "2016-03-23"), + client.prepareIndex("index").setId("6").setRouting("2").setSource("s", "2016-03-24"), + client.prepareIndex("index").setId("7").setRouting("3").setSource("s", "2016-03-25"), + client.prepareIndex("index").setId("8").setRouting("3").setSource("s", "2016-03-26"), + client.prepareIndex("index").setId("9").setRouting("3").setSource("s", "2016-03-27") ); ensureSearchable("index"); assertCacheState(client, "index", 0, 0); @@ -535,10 +535,7 @@ public void testCacheWithFilteredAlias() { .get() ); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); - client.prepareIndex("index", "type", "1") - .setRouting("1") - .setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)) - .get(); + client.prepareIndex("index").setId("1").setRouting("1").setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge("index").setFlush(true).get(); OpenSearchAssertions.assertAllSuccessful(forceMergeResponse); @@ -590,7 +587,7 @@ public void testProfileDisableCache() throws Exception { ) .get() ); - indexRandom(true, client.prepareIndex("index", "_doc").setSource("k", "hello")); + indexRandom(true, client.prepareIndex("index").setSource("k", "hello")); ensureSearchable("index"); int expectedHits = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java index 8c34656c34e99..7218495898677 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java @@ -155,7 +155,7 @@ public void testAnalyzerWithFieldOrTypeTests() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen(); - client().admin().indices().preparePutMapping("test").setType("document").setSource("simple", "type=text,analyzer=simple").get(); + client().admin().indices().preparePutMapping("test").setSource("simple", "type=text,analyzer=simple").get(); for (int i = 0; i < 10; i++) { final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST"); @@ -201,7 +201,6 @@ public void testAnalyzerWithMultiValues() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("document") .setSource("simple", "type=text,analyzer=simple,position_increment_gap=100") .get(); @@ -304,7 +303,6 @@ public void testDetailAnalyzeWithMultiValues() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("document") .setSource("simple", "type=text,analyzer=simple,position_increment_gap=100") .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java deleted file mode 100644 index 5d219159e1b5f..0000000000000 --- a/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.indices.exists.types; - -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.admin.indices.exists.types.TypesExistsResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.plugins.Plugin; -import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked; -import static org.hamcrest.Matchers.equalTo; - -public class TypesExistsIT extends OpenSearchIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return 
Collections.singleton(InternalSettingsPlugin.class); - } - - public void testSimple() throws Exception { - Client client = client(); - CreateIndexResponse response1 = client.admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject()) - .execute() - .actionGet(); - CreateIndexResponse response2 = client.admin() - .indices() - .prepareCreate("test2") - .addMapping("type2", jsonBuilder().startObject().startObject("type2").endObject().endObject()) - .execute() - .actionGet(); - client.admin().indices().prepareAliases().addAlias("test1", "alias1").execute().actionGet(); - assertAcked(response1); - assertAcked(response2); - - TypesExistsResponse response = client.admin().indices().prepareTypesExists("test1").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(true)); - response = client.admin().indices().prepareTypesExists("test1").setTypes("type2").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - try { - client.admin().indices().prepareTypesExists("notExist").setTypes("type1").execute().actionGet(); - fail("Exception should have been thrown"); - } catch (IndexNotFoundException e) {} - try { - client.admin().indices().prepareTypesExists("notExist").setTypes("type0").execute().actionGet(); - fail("Exception should have been thrown"); - } catch (IndexNotFoundException e) {} - response = client.admin().indices().prepareTypesExists("alias1").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(true)); - response = client.admin().indices().prepareTypesExists("*").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - response = client.admin().indices().prepareTypesExists("test1", "test2").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - response = client.admin().indices().prepareTypesExists("test1", 
"test2").setTypes("type2").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - } - - public void testTypesExistsWithBlocks() throws IOException { - assertAcked(prepareCreate("ro").addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())); - ensureGreen("ro"); - - // Request is not blocked - for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { - try { - enableIndexBlock("ro", block); - assertThat( - client().admin().indices().prepareTypesExists("ro").setTypes("type1").execute().actionGet().isExists(), - equalTo(true) - ); - } finally { - disableIndexBlock("ro", block); - } - } - - // Request is blocked - try { - enableIndexBlock("ro", IndexMetadata.SETTING_BLOCKS_METADATA); - assertBlocked(client().admin().indices().prepareTypesExists("ro").setTypes("type1")); - } finally { - disableIndexBlock("ro", IndexMetadata.SETTING_BLOCKS_METADATA); - } - } -} diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java index ef7fff331f0c2..e731b0074f04d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -34,7 +34,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexResponse; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -49,14 +48,10 @@ import static org.hamcrest.Matchers.emptyIterable; public class ConcurrentDynamicTemplateIT extends OpenSearchIntegTestCase { - private final String mappingType = "test-mapping"; - // see #3544 public void testConcurrentDynamicMapping() throws Exception { final String 
fieldName = "field"; - final String mapping = "{ \"" - + mappingType - + "\": {" + final String mapping = "{ " + "\"dynamic_templates\": [" + "{ \"" + fieldName @@ -65,14 +60,14 @@ public void testConcurrentDynamicMapping() throws Exception { + "\"mapping\": {" + "\"type\": \"text\"," + "\"store\": true," - + "\"analyzer\": \"whitespace\" } } } ] } }"; + + "\"analyzer\": \"whitespace\" } } } ] }"; // The 'fieldNames' array is used to help with retrieval of index terms // after testing int iters = scaledRandomIntBetween(5, 15); for (int i = 0; i < iters; i++) { cluster().wipeIndices("test"); - assertAcked(prepareCreate("test").addMapping(mappingType, mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); int numDocs = scaledRandomIntBetween(10, 100); final CountDownLatch latch = new CountDownLatch(numDocs); final List throwable = new CopyOnWriteArrayList<>(); @@ -80,7 +75,8 @@ public void testConcurrentDynamicMapping() throws Exception { for (int j = 0; j < numDocs; j++) { Map source = new HashMap<>(); source.put(fieldName, "test-user"); - client().prepareIndex("test", mappingType, Integer.toString(currentID++)) + client().prepareIndex("test") + .setId(Integer.toString(currentID++)) .setSource(source) .execute(new ActionListener() { @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java index 52e2fe303c377..92a65f43361d6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -77,7 +77,7 @@ public void testGetMappingsWhereThereAreNone() { assertThat(response.mappings().size(), equalTo(1)); assertThat(response.mappings().get("index").size(), equalTo(0)); - assertThat(response.fieldMappings("index", "type", "field"), 
nullValue()); + assertThat(response.fieldMappings("index", "field"), nullValue()); } private XContentBuilder getMappingForType(String type) throws IOException { @@ -112,55 +112,33 @@ public void testGetFieldMappings() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("indexa") - .setTypes("typeA") .setFields("field1", "obj.subfield") .get(); - assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); + assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1")); + assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); // Get mappings by name - response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), 
nullValue()); + response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("field1", "obj.subfield").get(); + assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1")); + assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); // get mappings by name across multiple indices - response = client().admin().indices().prepareGetFieldMappings().setTypes("typeA").setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue()); - - // get mappings by name across multiple types - response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); - - // get mappings by name across multiple types & indices response = client().admin().indices().prepareGetFieldMappings().setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", 
"obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); + assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); + assertThat(response.fieldMappings("indexb", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexb", "obj.subfield").sourceAsMap(), hasKey("subfield")); } @SuppressWarnings("unchecked") public void testSimpleGetFieldMappingsWithDefaults() throws Exception { assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); - client().admin().indices().preparePutMapping("test").setType("type").setSource("num", "type=long").get(); - client().admin().indices().preparePutMapping("test").setType("type").setSource("field2", "type=text,index=false").get(); + client().admin().indices().preparePutMapping("test").setSource("num", "type=long").get(); + client().admin().indices().preparePutMapping("test").setSource("field2", "type=text,index=false").get(); GetFieldMappingsResponse response = client().admin() .indices() @@ -169,25 +147,16 @@ public void testSimpleGetFieldMappingsWithDefaults() throws Exception { .includeDefaults(true) .get(); + assertThat((Map) response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE)); + assertThat((Map) response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("type", "long")); assertThat( - (Map) response.fieldMappings("test", "type", 
"num").sourceAsMap().get("num"), + (Map) response.fieldMappings("test", "field1").sourceAsMap().get("field1"), hasEntry("index", Boolean.TRUE) ); - assertThat((Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", "long")); + assertThat((Map) response.fieldMappings("test", "field1").sourceAsMap().get("field1"), hasEntry("type", "text")); + assertThat((Map) response.fieldMappings("test", "field2").sourceAsMap().get("field2"), hasEntry("type", "text")); assertThat( - (Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), - hasEntry("index", Boolean.TRUE) - ); - assertThat( - (Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), - hasEntry("type", "text") - ); - assertThat( - (Map) response.fieldMappings("test", "type", "field2").sourceAsMap().get("field2"), - hasEntry("type", "text") - ); - assertThat( - (Map) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), + (Map) response.fieldMappings("test", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", "keyword") ); } @@ -198,12 +167,12 @@ public void testGetFieldMappingsWithFieldAlias() throws Exception { GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("alias", "field1").get(); - FieldMappingMetadata aliasMapping = response.fieldMappings("test", "type", "alias"); + FieldMappingMetadata aliasMapping = response.fieldMappings("test", "alias"); assertThat(aliasMapping.fullName(), equalTo("alias")); assertThat(aliasMapping.sourceAsMap(), hasKey("alias")); assertThat((Map) aliasMapping.sourceAsMap().get("alias"), hasEntry("type", "alias")); - FieldMappingMetadata field1Mapping = response.fieldMappings("test", "type", "field1"); + FieldMappingMetadata field1Mapping = response.fieldMappings("test", "field1"); assertThat(field1Mapping.fullName(), equalTo("field1")); assertThat(field1Mapping.sourceAsMap(), hasKey("field1")); } @@ 
-216,7 +185,6 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("index") - .setTypes("type") .setFields("field1", "obj.subfield") .get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); @@ -229,7 +197,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { params.put("pretty", "false"); - response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); + response = client().admin().indices().prepareGetFieldMappings("index").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); responseStrings = Strings.toString(responseBuilder); @@ -249,10 +217,9 @@ public void testGetFieldMappingsWithBlocks() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("test") - .setTypes("_doc") .setFields("field1", "obj.subfield") .get(); - assertThat(response.fieldMappings("test", "_doc", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("test", "field1").fullName(), equalTo("field1")); } finally { disableIndexBlock("test", block); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java index cfd4a830e5c37..aac12522afa2f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java @@ -34,6 +34,7 @@ import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; +import 
org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.Priority; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.plugins.Plugin; @@ -66,7 +67,7 @@ public void testGetMappingsWhereThereAreNone() { createIndex("index"); GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().containsKey("index"), equalTo(true)); - assertThat(response.mappings().get("index").size(), equalTo(0)); + assertEquals(MappingMetadata.EMPTY_MAPPINGS, response.mappings().get("index")); } private XContentBuilder getMappingForType(String type) throws IOException { @@ -97,50 +98,19 @@ public void testSimpleGetMappings() throws Exception { // Get all mappings GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + assertThat(response.mappings().get("indexa"), notNullValue()); + assertThat(response.mappings().get("indexb"), notNullValue()); // Get all mappings, via wildcard support - response = client().admin().indices().prepareGetMappings("*").setTypes("*").execute().actionGet(); + response = client().admin().indices().prepareGetMappings("*").execute().actionGet(); assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + assertThat(response.mappings().get("indexa"), notNullValue()); + 
assertThat(response.mappings().get("indexb"), notNullValue()); - // Get all typeA mappings in all indices - response = client().admin().indices().prepareGetMappings("*").setTypes("typeA").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); - - // Get all mappings in indexa + // Get mappings in indexa response = client().admin().indices().prepareGetMappings("indexa").execute().actionGet(); assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - - // Get all mappings beginning with A* in indexa - response = client().admin().indices().prepareGetMappings("indexa").setTypes("*A").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - - // Get all mappings beginning with B* in all indices - response = client().admin().indices().prepareGetMappings().setTypes("B*").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(0)); - - // Get all mappings beginning with B* and A* in all indices - response = client().admin().indices().prepareGetMappings().setTypes("B*", "*A").execute().actionGet(); - assertThat(response.mappings().size(), equalTo(2)); - assertThat(response.mappings().get("indexa").size(), equalTo(1)); - assertThat(response.mappings().get("indexa").get("typeA"), notNullValue()); - assertThat(response.mappings().get("indexb").size(), equalTo(1)); - assertThat(response.mappings().get("indexb").get("typeA"), notNullValue()); + 
assertThat(response.mappings().get("indexa"), notNullValue()); } public void testGetMappingsWithBlocks() throws IOException { @@ -152,7 +122,7 @@ public void testGetMappingsWithBlocks() throws IOException { enableIndexBlock("test", block); GetMappingsResponse response = client().admin().indices().prepareGetMappings().execute().actionGet(); assertThat(response.mappings().size(), equalTo(1)); - assertThat(response.mappings().get("test").size(), equalTo(1)); + assertNotNull(response.mappings().get("test")); } finally { disableIndexBlock("test", block); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java index 5ad516a6514fb..0a29794add5a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -41,7 +41,6 @@ import org.opensearch.cluster.action.index.MappingUpdatedAction; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.Priority; -import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; @@ -112,7 +111,8 @@ public void testDynamicUpdates() throws Exception { String type = "type"; String fieldName = "field_" + type + "_" + rec; indexRequests.add( - client().prepareIndex("test", type, Integer.toString(rec)) + client().prepareIndex("test") + .setId(Integer.toString(rec)) .setTimeout(TimeValue.timeValueMinutes(5)) .setSource(fieldName, "some_value") ); @@ -130,7 +130,7 @@ public void testDynamicUpdates() throws Exception { for (int rec = 0; rec < recCount; rec++) { String type = "type"; String fieldName = "field_" + type + "_" + rec; - assertConcreteMappingsOnAll("test", type, 
fieldName); + assertConcreteMappingsOnAll("test", fieldName); } client().admin() @@ -145,7 +145,7 @@ public void testUpdateMappingWithoutType() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .addMapping("_doc", "{\"_doc\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -153,7 +153,6 @@ public void testUpdateMappingWithoutType() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) .execute() .actionGet(); @@ -162,7 +161,7 @@ public void testUpdateMappingWithoutType() { GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet(); assertThat( - getMappingsResponse.mappings().get("test").get("_doc").source().toString(), + getMappingsResponse.mappings().get("test").source().toString(), equalTo("{\"_doc\":{\"properties\":{\"body\":{\"type\":\"text\"},\"date\":{\"type\":\"integer\"}}}}") ); } @@ -179,7 +178,6 @@ public void testUpdateMappingWithoutTypeMultiObjects() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) .execute() .actionGet(); @@ -188,7 +186,7 @@ public void testUpdateMappingWithoutTypeMultiObjects() { GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet(); assertThat( - getMappingsResponse.mappings().get("test").get("_doc").source().toString(), + getMappingsResponse.mappings().get("test").source().toString(), 
equalTo("{\"_doc\":{\"properties\":{\"date\":{\"type\":\"integer\"}}}}") ); } @@ -198,7 +196,7 @@ public void testUpdateMappingWithConflicts() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -207,8 +205,10 @@ public void testUpdateMappingWithConflicts() { client().admin() .indices() .preparePutMapping("test") - .setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", XContentType.JSON) + .setSource( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", + XContentType.JSON + ) .execute() .actionGet(); fail("Expected MergeMappingException"); @@ -221,15 +221,17 @@ public void testUpdateMappingWithNormsConflicts() { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}") .execute() .actionGet(); try { client().admin() .indices() .preparePutMapping("test") - .setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", XContentType.JSON) + .setSource( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", + XContentType.JSON + ) .execute() .actionGet(); fail("Expected MergeMappingException"); @@ -246,7 +248,7 @@ public void testUpdateMappingNoChanges() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 
2).put("index.number_of_replicas", 0)) - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -254,8 +256,7 @@ public void testUpdateMappingNoChanges() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setSource("{\"properties\":{\"body\":{\"type\":\"text\"}}}", XContentType.JSON) .execute() .actionGet(); @@ -288,17 +289,15 @@ public void testUpdateMappingConcurrently() throws Throwable { Client client1 = clientArray.get(i % clientArray.size()); Client client2 = clientArray.get((i + 1) % clientArray.size()); String indexName = i % 2 == 0 ? "test2" : "test1"; - String typeName = "type"; String fieldName = Thread.currentThread().getName() + "_" + i; AcknowledgedResponse response = client1.admin() .indices() .preparePutMapping(indexName) - .setType(typeName) .setSource( JsonXContent.contentBuilder() .startObject() - .startObject(typeName) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(fieldName) .field("type", "text") @@ -312,10 +311,9 @@ public void testUpdateMappingConcurrently() throws Throwable { assertThat(response.isAcknowledged(), equalTo(true)); GetMappingsResponse getMappingResponse = client2.admin().indices().prepareGetMappings(indexName).get(); - ImmutableOpenMap mappings = getMappingResponse.getMappings().get(indexName); - assertThat(mappings.containsKey(typeName), equalTo(true)); + MappingMetadata mappings = getMappingResponse.getMappings().get(indexName); assertThat( - ((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), + ((Map) 
mappings.getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName) ); } @@ -349,7 +347,6 @@ public void testPutMappingsWithBlocks() { client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) ); } finally { @@ -364,7 +361,6 @@ public void testPutMappingsWithBlocks() { client().admin() .indices() .preparePutMapping("test") - .setType("_doc") .setSource("{\"properties\":{\"date\":{\"type\":\"integer\"}}}", XContentType.JSON) ); } finally { @@ -377,7 +373,7 @@ public void testPutMappingsWithBlocks() { * Waits until mappings for the provided fields exist on all nodes. Note, this waits for the current * started shards and checks for concrete mappings. */ - private void assertConcreteMappingsOnAll(final String index, final String type, final String... fieldNames) { + private void assertConcreteMappingsOnAll(final String index, final String... fieldNames) { Set nodes = internalCluster().nodesInclude(index); assertThat(nodes, Matchers.not(Matchers.emptyIterable())); for (String node : nodes) { @@ -390,20 +386,18 @@ private void assertConcreteMappingsOnAll(final String index, final String type, assertNotNull("field " + fieldName + " doesn't exists on " + node, fieldType); } } - assertMappingOnMaster(index, type, fieldNames); + assertMappingOnMaster(index, fieldNames); } /** * Waits for the given mapping type to exists on the master node. */ - private void assertMappingOnMaster(final String index, final String type, final String... fieldNames) { - GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).setTypes(type).get(); - ImmutableOpenMap mappings = response.getMappings().get(index); + private void assertMappingOnMaster(final String index, final String... 
fieldNames) { + GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).get(); + MappingMetadata mappings = response.getMappings().get(index); assertThat(mappings, notNullValue()); - MappingMetadata mappingMetadata = mappings.get(type); - assertThat(mappingMetadata, notNullValue()); + Map mappingSource = mappings.getSourceAsMap(); - Map mappingSource = mappingMetadata.getSourceAsMap(); assertFalse(mappingSource.isEmpty()); assertTrue(mappingSource.containsKey("properties")); @@ -413,7 +407,7 @@ private void assertMappingOnMaster(final String index, final String type, final fieldName = fieldName.replace(".", ".properties."); } assertThat( - "field " + fieldName + " doesn't exists in mapping " + mappingMetadata.source().string(), + "field " + fieldName + " doesn't exists in mapping " + mappings.source().string(), XContentMapValues.extractValue(fieldName, mappingProperties), notNullValue() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java index a409475da3cd7..1f79a52284c61 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -70,7 +70,7 @@ public void testNoopRequestBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); @@ -87,7 +87,7 @@ public void testNoopFielddataBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; 
id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java index b4f5301681879..e9bb9f5a90477 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -154,7 +154,7 @@ public void testMemoryBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", "value" + id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", "value" + id)); } indexRandom(true, false, true, reqs); @@ -208,7 +208,7 @@ public void testRamAccountingTermsEnum() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("ramtest", "type", Long.toString(id)).setSource("test", "value" + id)); + reqs.add(client.prepareIndex("ramtest").setId(Long.toString(id)).setSource("test", "value" + id)); } indexRandom(true, false, true, reqs); @@ -261,7 +261,7 @@ public void testRequestBreaker() throws Exception { int docCount = scaledRandomIntBetween(300, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); @@ -295,7 
+295,7 @@ public void testBucketBreaker() throws Exception { int docCount = scaledRandomIntBetween(100, 1000); List reqs = new ArrayList<>(); for (long id = 0; id < docCount; id++) { - reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + reqs.add(client.prepareIndex("cb-test").setId(Long.toString(id)).setSource("test", id)); } indexRandom(true, reqs); @@ -403,7 +403,7 @@ public void testLimitsRequestSize() { int numRequests = inFlightRequestsLimit.bytesAsInt(); BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numRequests; i++) { - IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)); + IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i)); indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field", "value", "num", i); bulkRequest.add(indexRequest); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index eb36e1c155ecc..341c0a965f94e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.indices.IndicesService; @@ -104,7 +103,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test-str") .field("type", 
"keyword") @@ -115,7 +113,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) .endObject() // test-num .endObject() // properties - .endObject() // type .endObject() ); final double topLevelRate; @@ -149,7 +146,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .indices() .prepareCreate("test") .setSettings(settings) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) .execute() .actionGet(); final int numDocs; @@ -169,7 +166,8 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc } for (int i = 0; i < numDocs; i++) { try { - client().prepareIndex("test", "type", "" + i) + client().prepareIndex("test") + .setId("" + i) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test-str", randomUnicodeOfLengthBetween(5, 25), "test-num", i) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 6d62b7969bc48..7fd2466647272 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -71,11 +71,11 @@ public void testPrimaryRelocationWhileIndexing() throws Exception { @Override public void run() { while (finished.get() == false && numAutoGenDocs.get() < 10_000) { - IndexResponse indexResponse = client().prepareIndex("test", "type", "id").setSource("field", "value").get(); + IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "id").get(); + 
DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - client().prepareIndex("test", "type").setSource("auto", true).get(); + client().prepareIndex("test").setSource("auto", true).get(); numAutoGenDocs.incrementAndGet(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index 3bab909d3b7f3..a7dc77e024d5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -33,8 +33,8 @@ package org.opensearch.indices.recovery; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.util.SetOnce; - import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; @@ -75,6 +75,7 @@ import org.opensearch.common.Strings; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; @@ -88,7 +89,6 @@ import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.analysis.AbstractTokenFilterFactory; import org.opensearch.index.analysis.TokenFilterFactory; -import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.recovery.RecoveryStats; import org.opensearch.index.seqno.ReplicationTracker; @@ -114,11 +114,11 @@ import org.opensearch.snapshots.SnapshotState; import org.opensearch.tasks.Task; import 
org.opensearch.test.BackgroundIndexer; +import org.opensearch.test.InternalSettingsPlugin; +import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.InternalTestCluster; import org.opensearch.test.engine.MockEngineSupport; import org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.transport.MockTransportService; @@ -151,12 +151,6 @@ import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toList; -import static org.opensearch.action.DocWriteResponse.Result.CREATED; -import static org.opensearch.action.DocWriteResponse.Result.UPDATED; -import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; - import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -167,12 +161,16 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; +import static org.opensearch.action.DocWriteResponse.Result.CREATED; +import static org.opensearch.action.DocWriteResponse.Result.UPDATED; +import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexRecoveryIT extends OpenSearchIntegTestCase { private static final String INDEX_NAME = "test-idx-1"; - private static final String INDEX_TYPE = "test-type-1"; 
private static final String REPO_NAME = "test-repo-1"; private static final String SNAP_NAME = "test-snap-1"; @@ -414,7 +412,7 @@ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exceptio int numDocs = randomIntBetween(10, 200); final IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex(INDEX_NAME, INDEX_TYPE) + docs[i] = client().prepareIndex(INDEX_NAME) .setSource("foo-int", randomInt(), "foo-string", randomAlphaOfLength(32), "foo-float", randomFloat()); } indexRandom(randomBoolean(), docs); @@ -828,7 +826,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, final IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex(name, INDEX_TYPE) + docs[i] = client().prepareIndex(name) .setSource("foo-int", randomInt(), "foo-string", randomAlphaOfLength(32), "foo-float", randomFloat()); } @@ -881,14 +879,14 @@ public void testTransientErrorsDuringRecoveryAreRetried() throws Exception { // is a mix of file chunks and translog ops int threeFourths = (int) (numDocs * 0.75); for (int i = 0; i < threeFourths; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); flush(indexName); requests.clear(); for (int i = threeFourths; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1080,7 +1078,7 @@ public void testDisconnectsWhileRecovering() throws Exception { List requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - 
requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1234,7 +1232,7 @@ public void testDisconnectsDuringRecovery() throws Exception { List requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); ensureSearchable(indexName); @@ -1377,7 +1375,7 @@ public void testHistoryRetention() throws Exception { final List requests = new ArrayList<>(); final int replicatedDocCount = scaledRandomIntBetween(25, 250); while (requests.size() < replicatedDocCount) { - requests.add(client().prepareIndex(indexName, "_doc").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); if (randomBoolean()) { @@ -1399,7 +1397,7 @@ public void testHistoryRetention() throws Exception { final int numNewDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numNewDocs; i++) { - client().prepareIndex(indexName, "_doc").setSource("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex(indexName).setSource("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); } // Flush twice to update the safe commit's local checkpoint assertThat(client().admin().indices().prepareFlush(indexName).setForce(true).execute().get().getFailedShards(), equalTo(0)); @@ -1435,15 +1433,11 @@ public void testDoNotInfinitelyWaitForMapping() { .put("index.number_of_shards", 1) .build() ); - client().admin() - .indices() - .preparePutMapping("test") - .setType("_doc") - .setSource("test_field", 
"type=text,analyzer=test_analyzer") - .get(); + client().admin().indices().preparePutMapping("test").setSource("test_field", "type=text,analyzer=test_analyzer").get(); int numDocs = between(1, 10); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "_doc", "u" + i) + client().prepareIndex("test") + .setId("u" + i) .setSource(singletonMap("test_field", Integer.toString(i)), XContentType.JSON) .get(); } @@ -1562,7 +1556,7 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { randomBoolean(), false, randomBoolean(), - IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList()) + IntStream.range(0, numDocs).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); client().admin().indices().prepareRefresh(indexName).get(); // avoid refresh when we are failing a shard String failingNode = randomFrom(nodes); @@ -1604,9 +1598,9 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { .getShardOrNull(new ShardId(resolveIndex(indexName), 0)); final long lastSyncedGlobalCheckpoint = shard.getLastSyncedGlobalCheckpoint(); final long localCheckpointOfSafeCommit; - try (Engine.IndexCommitRef safeCommitRef = shard.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit()) { localCheckpointOfSafeCommit = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommitRef.getIndexCommit().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ).localCheckpoint; } final long maxSeqNo = shard.seqNoStats().getMaxSeqNo(); @@ -1658,9 +1652,7 @@ public void testUsesFileBasedRecoveryIfRetentionLeaseMissing() throws Exception randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> 
client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1731,9 +1723,7 @@ public void testUsesFileBasedRecoveryIfRetentionLeaseAheadOfGlobalCheckpoint() t randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1767,7 +1757,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, between(1, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -1826,9 +1816,7 @@ public void testUsesFileBasedRecoveryIfOperationsBasedRecoveryWouldBeUnreasonabl randomBoolean(), false, randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); ensureGreen(indexName); @@ -1912,7 +1900,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, newDocCount) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -1964,9 +1952,7 @@ public void testDoesNotCopyOperationsInSafeCommit() throws Exception { randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> 
client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); final ShardId shardId = new ShardId(resolveIndex(indexName), 0); @@ -1985,9 +1971,7 @@ public void testDoesNotCopyOperationsInSafeCommit() throws Exception { randomBoolean(), randomBoolean(), randomBoolean(), - IntStream.range(0, between(0, 100)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) - .collect(toList()) + IntStream.range(0, between(0, 100)).mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)).collect(toList()) ); assertAcked( @@ -2056,7 +2040,7 @@ public void testRepeatedRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(0, 10)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -2088,7 +2072,7 @@ public void testRepeatedRecovery() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(0, 10)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); @@ -2117,7 +2101,7 @@ public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() throws Exception { .get() ); final List indexRequests = IntStream.range(0, between(10, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "type").setSource("foo", "bar")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("foo", "bar")) .collect(Collectors.toList()); indexRandom(randomBoolean(), true, true, indexRequests); ensureGreen(); @@ -2179,7 +2163,7 @@ public void testPeerRecoveryTrimsLocalTranslog() throws Exception { indexers[i] = new Thread(() -> { while (stopped.get() == false) { try { - IndexResponse response = client().prepareIndex(indexName, "_doc") + IndexResponse response = client().prepareIndex(indexName) .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), 
randomNonNegativeLong()), XContentType.JSON) .get(); assertThat(response.getResult(), isOneOf(CREATED, UPDATED)); @@ -2233,7 +2217,7 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() throws Exception { ); ensureGreen(indexName); final List indexRequests = IntStream.range(0, between(10, 500)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("foo", "bar")) + .mapToObj(n -> client().prepareIndex(indexName).setSource("foo", "bar")) .collect(Collectors.toList()); indexRandom(randomBoolean(), true, true, indexRequests); assertThat(client().admin().indices().prepareFlush(indexName).get().getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java index d2d025949abb6..f78ecd82834c2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -80,7 +80,8 @@ public void testSimpleUpdateNumberOfReplicas() throws Exception { assertThat(clusterHealth.getIndices().get("test").getActiveShards(), equalTo(numShards.totalNumShards)); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("value", "test" + i).endObject()) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java index 13e1e4a3fea52..6dab7781e08db 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/settings/UpdateSettingsIT.java @@ -506,16 +506,16 @@ 
public void testOpenCloseUpdateSettings() throws Exception { public void testEngineGCDeletesSetting() throws Exception { createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("f", 1).setVersionType(VersionType.EXTERNAL).setVersion(1).get(); - client().prepareDelete("test", "type", "1").setVersionType(VersionType.EXTERNAL).setVersion(2).get(); + client().prepareIndex("test").setId("1").setSource("f", 1).setVersionType(VersionType.EXTERNAL).setVersion(1).get(); + client().prepareDelete("test", "1").setVersionType(VersionType.EXTERNAL).setVersion(2).get(); // delete is still in cache this should fail assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setSource("f", 3).setVersionType(VersionType.EXTERNAL).setVersion(1), + client().prepareIndex("test").setId("1").setSource("f", 3).setVersionType(VersionType.EXTERNAL).setVersion(1), VersionConflictEngineException.class ); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0))); - client().prepareDelete("test", "type", "1").setVersionType(VersionType.EXTERNAL).setVersion(4).get(); + client().prepareDelete("test", "1").setVersionType(VersionType.EXTERNAL).setVersion(4).get(); // Make sure the time has advanced for InternalEngine#resolveDocVersion() for (ThreadPool threadPool : internalCluster().getInstances(ThreadPool.class)) { @@ -524,7 +524,7 @@ public void testEngineGCDeletesSetting() throws Exception { } // delete should not be in cache - client().prepareIndex("test", "type", "1").setSource("f", 2).setVersionType(VersionType.EXTERNAL).setVersion(1); + client().prepareIndex("test").setId("1").setSource("f", 2).setVersionType(VersionType.EXTERNAL).setVersion(1); } public void testUpdateSettingsWithBlocks() { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java index 
aebb891ae784b..41749a9bfd0f4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java @@ -143,7 +143,7 @@ public void testCloseIndex() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); @@ -164,7 +164,7 @@ public void testCloseAlreadyClosedIndex() throws Exception { false, randomBoolean(), IntStream.range(0, randomIntBetween(1, 10)) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); } @@ -207,7 +207,7 @@ public void testConcurrentClose() throws InterruptedException { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) .collect(toList()) ); ensureYellowAndNoInitializingShards(indexName); @@ -268,7 +268,7 @@ public void testCloseWhileDeletingIndices() throws Exception { false, randomBoolean(), IntStream.range(0, 10) - .mapToObj(n -> client().prepareIndex(indexName, "_doc", String.valueOf(n)).setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setId(String.valueOf(n)).setSource("num", n)) .collect(toList()) ); } @@ -395,7 +395,7 @@ public void testCloseIndexWaitForActiveShards() throws Exception { false, randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)) + .mapToObj(i -> client().prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)) 
.collect(toList()) ); ensureGreen(indexName); @@ -433,7 +433,7 @@ public void testNoopPeerRecoveriesWhenIndexClosed() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); ensureGreen(indexName); @@ -480,7 +480,7 @@ public void testRecoverExistingReplica() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); ensureGreen(indexName); @@ -492,7 +492,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { Client client = client(dataNodes.get(0)); int moreDocs = randomIntBetween(1, 50); for (int i = 0; i < moreDocs; i++) { - client.prepareIndex(indexName, "_doc").setSource("num", i).get(); + client.prepareIndex(indexName).setSource("num", i).get(); } assertAcked(client.admin().indices().prepareClose(indexName)); return super.onNodeStopped(nodeName); @@ -529,7 +529,7 @@ public void testRelocatedClosedIndexIssue() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); assertAcked(client().admin().indices().prepareClose(indexName).setWaitForActiveShards(ActiveShardCount.ALL)); @@ -557,7 +557,7 @@ public void testResyncPropagatePrimaryTerm() throws Exception { randomBoolean(), randomBoolean(), IntStream.range(0, randomIntBetween(0, 50)) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(toList()) ); 
ensureGreen(indexName); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java index 88be4d71aeb63..caf741e9b8882 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -120,7 +120,7 @@ public void testCloseWhileRelocatingShards() throws Exception { indexRandom( randomBoolean(), IntStream.range(0, nbDocs) - .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)) + .mapToObj(n -> client().prepareIndex(indexName).setSource("num", n)) .collect(Collectors.toList()) ); break; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java index e22bae9cf3ad1..ca1e1399f8fdc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java @@ -47,7 +47,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; @@ -305,22 +304,20 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); - 
assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", mapping, XContentType.JSON)); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); ensureGreen(); int docs = between(10, 100); IndexRequestBuilder[] builder = new IndexRequestBuilder[docs]; for (int i = 0; i < docs; i++) { - builder[i] = client().prepareIndex("test", "type", "" + i).setSource("test", "init"); + builder[i] = client().prepareIndex("test").setId("" + i).setSource("test", "init"); } indexRandom(true, builder); if (randomBoolean()) { @@ -331,7 +328,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte // check the index still contains the records that we indexed client().admin().indices().prepareOpen("test").execute().get(); ensureGreen(); - SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, docs); } @@ -342,7 +339,7 @@ public void testOpenCloseIndexWithBlocks() { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", "" + i).setSource("test", "init").execute().actionGet(); + client().prepareIndex("test").setId("" + i).setSource("test", "init").execute().actionGet(); } for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE)) { @@ -398,7 +395,7 @@ public void testTranslogStats() throws Exception { final int nbDocs = randomIntBetween(0, 50); int uncommittedOps = 0; for (long i = 0; i < nbDocs; i++) { - final IndexResponse indexResponse = client().prepareIndex(indexName, "_doc", Long.toString(i)).setSource("field", i).get(); + final IndexResponse indexResponse = client().prepareIndex(indexName).setId(Long.toString(i)).setSource("field", i).get(); assertThat(indexResponse.status(), 
is(RestStatus.CREATED)); if (rarely()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java index 7f092bae7a79d..b75e36efe1f2f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/SimpleIndexStateIT.java @@ -75,7 +75,7 @@ public void testSimpleOpenClose() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); @@ -86,7 +86,7 @@ public void testSimpleOpenClose() { logger.info("--> trying to index into a closed index ..."); try { - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); fail(); } catch (IndexClosedException e) { // all is well @@ -109,7 +109,7 @@ public void testSimpleOpenClose() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); } public void testFastCloseAfterCreateContinuesCreateAfterOpen() { @@ -150,7 +150,7 @@ public void testFastCloseAfterCreateContinuesCreateAfterOpen() { ); logger.info("--> indexing a simple document"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); } public void testConsistencyAfterIndexCreationFailure() { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index 9cd91cab2b122..c503dd9f83273 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -152,8 +152,8 @@ public void testFieldDataStats() { .get() ); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", "value1", "field2", "value1").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field", "value2", "field2", "value2").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field", "value1", "field2", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field", "value2", "field2", "value2").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -275,8 +275,8 @@ public void testClearAllCaches() throws Exception { ); ensureGreen(); client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - client().prepareIndex("test", "type", "1").setSource("field", "value1").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field", "value2").execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field", "value1").execute().actionGet(); + client().prepareIndex("test").setId("2").setSource("field", "value2").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); @@ -385,7 +385,8 @@ public void testQueryCache() throws Exception { while (true) { IndexRequestBuilder[] builders = new IndexRequestBuilder[pageDocs]; for (int i = 0; i < pageDocs; ++i) { - 
builders[i] = client().prepareIndex("idx", "type", Integer.toString(counter++)) + builders[i] = client().prepareIndex("idx") + .setId(Integer.toString(counter++)) .setSource(jsonBuilder().startObject().field("common", "field").field("str_value", "s" + i).endObject()); } indexRandom(true, builders); @@ -445,7 +446,8 @@ public void testQueryCache() throws Exception { // index the data again... IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type", Integer.toString(i)) + builders[i] = client().prepareIndex("idx") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("common", "field").field("str_value", "s" + i).endObject()); } indexRandom(true, builders); @@ -577,7 +579,7 @@ public void testNonThrottleStats() throws Exception { sb.append(termUpto++); sb.append(" some random text that keeps repeating over and over again hambone"); } - client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); + client().prepareIndex("test").setId("" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); } refresh(); stats = client().admin().indices().prepareStats().execute().actionGet(); @@ -613,7 +615,7 @@ public void testThrottleStats() throws Exception { sb.append(' '); sb.append(termUpto++); } - client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); + client().prepareIndex("test").setId("" + termUpto).setSource("field" + (i % 10), sb.toString()).get(); if (i % 2 == 0) { refresh(); } @@ -639,9 +641,9 @@ public void testSimpleStats() throws Exception { createIndex("test1", "test2"); ensureGreen(); - client().prepareIndex("test1", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test1", "type", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test2", "type", 
Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); refresh(); NumShards test1 = getNumShards("test1"); @@ -694,7 +696,7 @@ public void testSimpleStats() throws Exception { assertThat(stats.getTotal().getRefresh(), notNullValue()); // check get - GetResponse getResponse = client().prepareGet("test2", "type", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test2", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); stats = client().admin().indices().prepareStats().execute().actionGet(); @@ -703,7 +705,7 @@ public void testSimpleStats() throws Exception { assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(0L)); // missing get - getResponse = client().prepareGet("test2", "type", "2").execute().actionGet(); + getResponse = client().prepareGet("test2", "2").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); stats = client().admin().indices().prepareStats().execute().actionGet(); @@ -733,7 +735,8 @@ public void testSimpleStats() throws Exception { // index failed try { - client().prepareIndex("test1", "type", Integer.toString(1)) + client().prepareIndex("test1") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ -742,7 +745,8 @@ public void testSimpleStats() throws Exception { fail("Expected a version conflict"); } catch (VersionConflictEngineException e) {} try { - client().prepareIndex("test2", "type", Integer.toString(1)) + client().prepareIndex("test2") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ 
-751,7 +755,8 @@ public void testSimpleStats() throws Exception { fail("Expected a version conflict"); } catch (VersionConflictEngineException e) {} try { - client().prepareIndex("test2", "type", Integer.toString(1)) + client().prepareIndex("test2") + .setId(Integer.toString(1)) .setSource("field", "value") .setVersion(1) .setVersionType(VersionType.EXTERNAL) @@ -791,7 +796,7 @@ public void testMergeStats() { assertThat(stats.getTotal().getSearch(), nullValue()); for (int i = 0; i < 20; i++) { - client().prepareIndex("test_index", "_doc", Integer.toString(i)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(i)).setSource("field", "value").execute().actionGet(); client().admin().indices().prepareFlush().execute().actionGet(); } client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet(); @@ -837,9 +842,9 @@ public void testAllFlags() throws Exception { ensureGreen(); - client().prepareIndex("test_index", "_doc", Integer.toString(1)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test_index", "_doc", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test_index_2", "type", Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test_index_2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); @@ -964,9 +969,9 @@ public void testMultiIndex() throws Exception { ensureGreen(); - client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("field", 
"value").execute().actionGet(); - client().prepareIndex("test1", "_doc", Integer.toString(2)).setSource("field", "value").execute().actionGet(); - client().prepareIndex("test2", "_doc", Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(2)).setSource("field", "value").execute().actionGet(); + client().prepareIndex("test2").setId(Integer.toString(1)).setSource("field", "value").execute().actionGet(); refresh(); int numShards1 = getNumShards("test1").totalNumShards; @@ -999,16 +1004,14 @@ public void testMultiIndex() throws Exception { public void testCompletionFieldsParam() throws Exception { assertAcked( - prepareCreate("test1").addMapping( - "_doc", + prepareCreate("test1").setMapping( "{ \"properties\": { \"bar\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}" - + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}", - XContentType.JSON + + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}" ) ); ensureGreen(); - client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); refresh(); IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); @@ -1050,7 +1053,7 @@ public void testGroupsParam() throws Exception { ensureGreen(); - client().prepareIndex("test1", "bar", Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); + client().prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); refresh(); client().prepareSearch("_all").setStats("bar", "baz").execute().actionGet(); @@ 
-1079,40 +1082,6 @@ public void testGroupsParam() throws Exception { } - public void testTypesParam() throws Exception { - createIndex("test1"); - createIndex("test2"); - - ensureGreen(); - - client().prepareIndex("test1", "bar", Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); - client().prepareIndex("test2", "baz", Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); - refresh(); - - IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); - IndicesStatsResponse stats = builder.execute().actionGet(); - - assertThat(stats.getTotal().indexing.getTotal().getIndexCount(), greaterThan(0L)); - assertThat(stats.getTotal().indexing.getTypeStats(), is(nullValue())); - - stats = builder.setTypes("bar").execute().actionGet(); - assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L)); - assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false)); - - stats = builder.setTypes("bar", "baz").execute().actionGet(); - assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L)); - assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L)); - - stats = builder.setTypes("*").execute().actionGet(); - assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L)); - assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L)); - - stats = builder.setTypes("*r").execute().actionGet(); - assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L)); - assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false)); - - } - private static void set(Flag flag, IndicesStatsRequestBuilder builder, boolean set) { switch (flag) { case Docs: @@ -1244,8 +1213,8 @@ public void testFilterCacheStats() throws Exception { indexRandom( false, true, - client().prepareIndex("index", 
"type", "1").setSource("foo", "bar"), - client().prepareIndex("index", "type", "2").setSource("foo", "baz") + client().prepareIndex("index").setId("1").setSource("foo", "bar"), + client().prepareIndex("index").setId("2").setSource("foo", "baz") ); persistGlobalCheckpoint("index"); // Need to persist the global checkpoint for the soft-deletes retention MP. refresh(); @@ -1279,8 +1248,8 @@ public void testFilterCacheStats() throws Exception { assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L)); }); - assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "1").get().getResult()); - assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "2").get().getResult()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "1").get().getResult()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "2").get().getResult()); // Here we are testing that a fully deleted segment should be dropped and its cached is evicted. // In order to instruct the merge policy not to keep a fully deleted segment, // we need to flush and make that commit safe so that the SoftDeletesPolicy can drop everything. 
@@ -1319,8 +1288,8 @@ public void testFilterCacheStats() throws Exception { indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("foo", "bar"), - client().prepareIndex("index", "type", "2").setSource("foo", "baz") + client().prepareIndex("index").setId("1").setSource("foo", "bar"), + client().prepareIndex("index").setId("2").setSource("foo", "baz") ); assertBusy(() -> { @@ -1387,7 +1356,7 @@ public void testConcurrentIndexingAndStatsRequests() throws BrokenBarrierExcepti } while (!stop.get()) { final String id = Integer.toString(idGenerator.incrementAndGet()); - final IndexResponse response = client().prepareIndex("test", "type", id).setSource("{}", XContentType.JSON).get(); + final IndexResponse response = client().prepareIndex("test").setId(id).setSource("{}", XContentType.JSON).get(); assertThat(response.getResult(), equalTo(DocWriteResponse.Result.CREATED)); } }); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java index f6a8f5fdfee90..378657a6554b4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java @@ -182,10 +182,7 @@ public void testSimpleIndexTemplateTests() throws Exception { assertThat(response.getIndexTemplates(), hasSize(2)); // index something into test_index, will match on both templates - client().prepareIndex("test_index", "type1", "1") - .setSource("field1", "value1", "field2", "value 2") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("test_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); SearchResponse searchResponse = client().prepareSearch("test_index") @@ -200,10 +197,7 @@ public void testSimpleIndexTemplateTests() throws 
Exception { // field2 is not stored. assertThat(searchResponse.getHits().getAt(0).field("field2"), nullValue()); - client().prepareIndex("text_index", "type1", "1") - .setSource("field1", "value1", "field2", "value 2") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("text_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); // now only match on one template (template_1) @@ -570,11 +564,11 @@ public void testIndexTemplateWithAliases() throws Exception { assertAcked(prepareCreate("test_index")); ensureGreen(); - client().prepareIndex("test_index", "_doc", "1").setSource("type", "type1", "field", "A value").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("type", "type2", "field", "B value").get(); - client().prepareIndex("test_index", "_doc", "3").setSource("type", "typeX", "field", "C value").get(); - client().prepareIndex("test_index", "_doc", "4").setSource("type", "typeY", "field", "D value").get(); - client().prepareIndex("test_index", "_doc", "5").setSource("type", "typeZ", "field", "E value").get(); + client().prepareIndex("test_index").setId("1").setSource("type", "type1", "field", "A value").get(); + client().prepareIndex("test_index").setId("2").setSource("type", "type2", "field", "B value").get(); + client().prepareIndex("test_index").setId("3").setSource("type", "typeX", "field", "C value").get(); + client().prepareIndex("test_index").setId("4").setSource("type", "typeY", "field", "D value").get(); + client().prepareIndex("test_index").setId("5").setSource("type", "typeZ", "field", "E value").get(); GetAliasesResponse getAliasesResponse = client().admin().indices().prepareGetAliases().setIndices("test_index").get(); assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); @@ -637,8 +631,8 @@ public void testIndexTemplateWithAliasesInSource() { assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); 
assertThat(getAliasesResponse.getAliases().get("test_index").size(), equalTo(1)); - client().prepareIndex("test_index", "_doc", "1").setSource("field", "value1").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("field", "value2").get(); + client().prepareIndex("test_index").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test_index").get(); @@ -676,8 +670,8 @@ public void testIndexTemplateWithAliasesSource() { assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); assertThat(getAliasesResponse.getAliases().get("test_index").size(), equalTo(3)); - client().prepareIndex("test_index", "_doc", "1").setSource("field", "value1").get(); - client().prepareIndex("test_index", "_doc", "2").setSource("field", "value2").get(); + client().prepareIndex("test_index").setId("1").setSource("field", "value1").get(); + client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test_index").get(); @@ -838,12 +832,11 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio .addAlias(new Alias("alias4").filter(termQuery("field", "value"))) .get(); - client().prepareIndex("a1", "test", "test").setSource("{}", XContentType.JSON).get(); - BulkResponse response = client().prepareBulk().add(new IndexRequest("a2", "test", "test").source("{}", XContentType.JSON)).get(); + client().prepareIndex("a1").setId("test").setSource("{}", XContentType.JSON).get(); + BulkResponse response = client().prepareBulk().add(new IndexRequest("a2").id("test").source("{}", XContentType.JSON)).get(); assertThat(response.hasFailures(), is(false)); assertThat(response.getItems()[0].isFailed(), equalTo(false)); assertThat(response.getItems()[0].getIndex(), equalTo("a2")); - assertThat(response.getItems()[0].getType(), 
equalTo("test")); assertThat(response.getItems()[0].getId(), equalTo("test")); assertThat(response.getItems()[0].getVersion(), equalTo(1L)); @@ -855,9 +848,9 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio // So the aliases defined in the index template for this index will not fail // even though the fields in the alias fields don't exist yet and indexing into // an index that doesn't exist yet will succeed - client().prepareIndex("b1", "test", "test").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("b1").setId("test").setSource("{}", XContentType.JSON).get(); - response = client().prepareBulk().add(new IndexRequest("b2", "test", "test").source("{}", XContentType.JSON)).get(); + response = client().prepareBulk().add(new IndexRequest("b2").id("test").source("{}", XContentType.JSON)).get(); assertThat(response.hasFailures(), is(false)); assertThat(response.getItems()[0].isFailed(), equalTo(false)); assertThat(response.getItems()[0].getId(), equalTo("test")); @@ -973,9 +966,9 @@ public void testMultipleTemplate() throws IOException { ) .get(); - client().prepareIndex("ax", "type1", "1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("ax").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); - client().prepareIndex("bx", "type1", "1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("bx").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java index 4ef8b2ba38e67..2f666bbd65d4d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java @@ -138,7 +138,7 @@ public void testSimulate() throws Exception { source.put("foo", "bar"); source.put("fail", false); source.put("processed", true); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, source); + IngestDocument ingestDocument = new IngestDocument("index", "id", null, null, null, source); assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); @@ -167,7 +167,7 @@ public void testBulkWithIngestFailures() throws Exception { int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numRequests; i++) { - IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)).setPipeline("_id"); + IndexRequest indexRequest = new IndexRequest("index").id(Integer.toString(i)).setPipeline("_id"); indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field", "value", "fail", i % 2 == 0); bulkRequest.add(indexRequest); } @@ -216,10 +216,10 @@ public void testBulkWithUpsert() throws Exception { client().admin().cluster().putPipeline(putPipelineRequest).get(); BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest = new IndexRequest("index", "type", "1").setPipeline("_id"); + IndexRequest indexRequest = new IndexRequest("index").id("1").setPipeline("_id"); indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "val1"); bulkRequest.add(indexRequest); - UpdateRequest updateRequest = new UpdateRequest("index", "type", "2"); + UpdateRequest updateRequest = new UpdateRequest("index", "2"); updateRequest.doc("{}", Requests.INDEX_CONTENT_TYPE); updateRequest.upsert("{\"field1\":\"upserted_val\"}", XContentType.JSON).upsertRequest().setPipeline("_id"); bulkRequest.add(updateRequest); @@ -227,10 +227,10 @@ public void 
testBulkWithUpsert() throws Exception { BulkResponse response = client().bulk(bulkRequest).actionGet(); assertThat(response.getItems().length, equalTo(bulkRequest.requests().size())); - Map inserted = client().prepareGet("index", "type", "1").get().getSourceAsMap(); + Map inserted = client().prepareGet("index", "1").get().getSourceAsMap(); assertThat(inserted.get("field1"), equalTo("val1")); assertThat(inserted.get("processed"), equalTo(true)); - Map upserted = client().prepareGet("index", "type", "2").get().getSourceAsMap(); + Map upserted = client().prepareGet("index", "2").get().getSourceAsMap(); assertThat(upserted.get("field1"), equalTo("upserted_val")); assertThat(upserted.get("processed"), equalTo(true)); } @@ -256,16 +256,16 @@ public void test() throws Exception { assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - client().prepareIndex("test", "type", "1").setPipeline("_id").setSource("field", "value", "fail", false).get(); + client().prepareIndex("test").setId("1").setPipeline("_id").setSource("field", "value", "fail", false).get(); - Map doc = client().prepareGet("test", "type", "1").get().getSourceAsMap(); + Map doc = client().prepareGet("test", "1").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value")); assertThat(doc.get("processed"), equalTo(true)); client().prepareBulk() - .add(client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false).setPipeline("_id")) + .add(client().prepareIndex("test").setId("2").setSource("field", "value2", "fail", false).setPipeline("_id")) .get(); - doc = client().prepareGet("test", "type", "2").get().getSourceAsMap(); + doc = client().prepareGet("test", "2").get().getSourceAsMap(); assertThat(doc.get("field"), equalTo("value2")); assertThat(doc.get("processed"), equalTo(true)); @@ -319,7 +319,7 @@ public void testWithDedicatedMaster() throws Exception { 
client().admin().cluster().putPipeline(putPipelineRequest).get(); BulkItemResponse item = client(masterOnlyNode).prepareBulk() - .add(client().prepareIndex("test", "type").setSource("field", "value2", "drop", true).setPipeline("_id")) + .add(client().prepareIndex("test").setSource("field", "value2", "drop", true).setPipeline("_id")) .get() .getItems()[0]; assertFalse(item.isFailed()); @@ -451,8 +451,8 @@ public void testPipelineProcessorOnFailure() throws Exception { client().admin().cluster().putPipeline(putPipelineRequest).get(); } - client().prepareIndex("test", "_doc").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get(); - Map inserted = client().prepareGet("test", "_doc", "1").get().getSourceAsMap(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get(); + Map inserted = client().prepareGet("test", "1").get().getSourceAsMap(); assertThat(inserted.get("readme"), equalTo("pipeline with id [3] is a bad pipeline")); } diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java index e2480e0705ae3..5b78c5686dc6a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java @@ -62,14 +62,15 @@ public class SimpleMgetIT extends OpenSearchIntegTestCase { public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException { createIndex("test"); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); MultiGetResponse mgetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item("test", "test", "1")) - .add(new MultiGetRequest.Item("nonExistingIndex", "test", "1")) + .add(new MultiGetRequest.Item("test", "1")) + .add(new 
MultiGetRequest.Item("nonExistingIndex", "1")) .get(); assertThat(mgetResponse.getResponses().length, is(2)); @@ -84,7 +85,7 @@ public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException { is("nonExistingIndex") ); - mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("nonExistingIndex", "test", "1")).get(); + mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("nonExistingIndex", "1")).get(); assertThat(mgetResponse.getResponses().length, is(1)); assertThat(mgetResponse.getResponses()[0].getIndex(), is("nonExistingIndex")); assertThat(mgetResponse.getResponses()[0].isFailed(), is(true)); @@ -99,14 +100,15 @@ public void testThatMgetShouldWorkWithMultiIndexAlias() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("multiIndexAlias"))); assertAcked(prepareCreate("test2").addAlias(new Alias("multiIndexAlias"))); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); MultiGetResponse mgetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item("test", "test", "1")) - .add(new MultiGetRequest.Item("multiIndexAlias", "test", "1")) + .add(new MultiGetRequest.Item("test", "1")) + .add(new MultiGetRequest.Item("multiIndexAlias", "1")) .get(); assertThat(mgetResponse.getResponses().length, is(2)); @@ -117,7 +119,7 @@ public void testThatMgetShouldWorkWithMultiIndexAlias() throws IOException { assertThat(mgetResponse.getResponses()[1].isFailed(), is(true)); assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), containsString("more than one index")); - mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("multiIndexAlias", "test", "1")).get(); + mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("multiIndexAlias", "1")).get(); assertThat(mgetResponse.getResponses().length, is(1)); 
assertThat(mgetResponse.getResponses()[0].getIndex(), is("multiIndexAlias")); assertThat(mgetResponse.getResponses()[0].isFailed(), is(true)); @@ -139,12 +141,13 @@ public void testThatMgetShouldWorkWithAliasRouting() throws IOException { ) ); - client().prepareIndex("alias1", "test", "1") + client().prepareIndex("alias1") + .setId("1") .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - MultiGetResponse mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("alias1", "test", "1")).get(); + MultiGetResponse mgetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item("alias1", "1")).get(); assertEquals(1, mgetResponse.getResponses().length); assertEquals("test", mgetResponse.getResponses()[0].getIndex()); @@ -165,20 +168,20 @@ public void testThatSourceFilteringIsSupported() throws Exception { .endObject() ); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get(); } MultiGetRequestBuilder request = client().prepareMultiGet(); for (int i = 0; i < 100; i++) { if (i % 2 == 0) { request.add( - new MultiGetRequest.Item(indexOrAlias(), "type", Integer.toString(i)).fetchSourceContext( + new MultiGetRequest.Item(indexOrAlias(), Integer.toString(i)).fetchSourceContext( new FetchSourceContext(true, new String[] { "included" }, new String[] { "*.hidden_field" }) ) ); } else { request.add( - new MultiGetRequest.Item(indexOrAlias(), "type", Integer.toString(i)).fetchSourceContext(new FetchSourceContext(false)) + new MultiGetRequest.Item(indexOrAlias(), Integer.toString(i)).fetchSourceContext(new FetchSourceContext(false)) ); } } @@ -212,15 +215,16 @@ public void testThatRoutingPerDocumentIsSupported() throws Exception { final String id = routingKeyForShard("test", 0); final String routingOtherShard 
= routingKeyForShard("test", 1); - client().prepareIndex("test", "test", id) + client().prepareIndex("test") + .setId(id) .setRefreshPolicy(IMMEDIATE) .setRouting(routingOtherShard) .setSource(jsonBuilder().startObject().field("foo", "bar").endObject()) .get(); MultiGetResponse mgetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "test", id).routing(routingOtherShard)) - .add(new MultiGetRequest.Item(indexOrAlias(), "test", id)) + .add(new MultiGetRequest.Item(indexOrAlias(), id).routing(routingOtherShard)) + .add(new MultiGetRequest.Item(indexOrAlias(), id)) .get(); assertThat(mgetResponse.getResponses().length, is(2)); diff --git a/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorFullRestartIT.java b/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorFullRestartIT.java index 2434afe5b8f06..708388b3328f0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorFullRestartIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorFullRestartIT.java @@ -84,21 +84,13 @@ public void testFullClusterRestart() throws Exception { assertThat(tasksInProgress.tasks().size(), equalTo(numberOfTasks)); // Make sure that at least one of the tasks is running - assertBusy( - () -> { - // Wait for the task to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - greaterThan(0) - ); - } - ); + assertBusy(() -> { + // Wait for the task to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + greaterThan(0) + ); + }); // Restart cluster internalCluster().fullRestart(); @@ -113,21 +105,13 @@ public void testFullClusterRestart() throws Exception { } logger.info("Waiting for {} tasks to start", 
numberOfTasks); - assertBusy( - () -> { - // Wait for all tasks to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - equalTo(numberOfTasks) - ); - } - ); + assertBusy(() -> { + // Wait for all tasks to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + equalTo(numberOfTasks) + ); + }); logger.info("Complete all tasks"); // Complete the running task and make sure it finishes properly @@ -136,18 +120,16 @@ public void testFullClusterRestart() throws Exception { equalTo(numberOfTasks) ); - assertBusy( - () -> { - // Make sure the task is removed from the cluster state - assertThat( - ((PersistentTasksCustomMetadata) internalCluster().clusterService() - .state() - .getMetadata() - .custom(PersistentTasksCustomMetadata.TYPE)).tasks(), - empty() - ); - } - ); + assertBusy(() -> { + // Make sure the task is removed from the cluster state + assertThat( + ((PersistentTasksCustomMetadata) internalCluster().clusterService() + .state() + .getMetadata() + .custom(PersistentTasksCustomMetadata.TYPE)).tasks(), + empty() + ); + }); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorIT.java b/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorIT.java index 4e3cfa4fbb5c0..9ea80ae7dbd89 100644 --- a/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/persistent/PersistentTasksExecutorIT.java @@ -95,21 +95,13 @@ public void testPersistentActionFailure() throws Exception { PlainActionFuture> future = new PlainActionFuture<>(); persistentTasksService.sendStartRequest(UUIDs.base64UUID(), TestPersistentTasksExecutor.NAME, new TestParams("Blah"), future); long allocationId = 
future.get().getAllocationId(); - assertBusy( - () -> { - // Wait for the task to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - equalTo(1) - ); - } - ); + assertBusy(() -> { + // Wait for the task to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + equalTo(1) + ); + }); TaskInfo firstRunningTask = client().admin() .cluster() .prepareListTasks() @@ -130,15 +122,13 @@ public void testPersistentActionFailure() throws Exception { ); logger.info("Waiting for persistent task with id {} to disappear", firstRunningTask.getId()); - assertBusy( - () -> { - // Wait for the task to disappear completely - assertThat( - client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), - empty() - ); - } - ); + assertBusy(() -> { + // Wait for the task to disappear completely + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), + empty() + ); + }); } public void testPersistentActionCompletion() throws Exception { @@ -147,21 +137,13 @@ public void testPersistentActionCompletion() throws Exception { String taskId = UUIDs.base64UUID(); persistentTasksService.sendStartRequest(taskId, TestPersistentTasksExecutor.NAME, new TestParams("Blah"), future); long allocationId = future.get().getAllocationId(); - assertBusy( - () -> { - // Wait for the task to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - equalTo(1) - ); - } - ); + assertBusy(() -> { + // Wait for the task to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + equalTo(1) + 
); + }); TaskInfo firstRunningTask = client().admin() .cluster() .prepareListTasks() @@ -225,15 +207,13 @@ public void testPersistentActionWithNoAvailableNode() throws Exception { internalCluster().stopRandomNode(settings -> "test".equals(settings.get("node.attr.test_attr"))); - assertBusy( - () -> { - // Wait for the task to disappear completely - assertThat( - client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), - empty() - ); - } - ); + assertBusy(() -> { + // Wait for the task to disappear completely + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), + empty() + ); + }); // Remove the persistent task PlainActionFuture> removeFuture = new PlainActionFuture<>(); @@ -368,21 +348,13 @@ public void testCreatePersistentTaskWithDuplicateId() throws Exception { persistentTasksService.sendStartRequest(taskId, TestPersistentTasksExecutor.NAME, new TestParams("Blah"), future2); assertFutureThrows(future2, ResourceAlreadyExistsException.class); - assertBusy( - () -> { - // Wait for the task to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - equalTo(1) - ); - } - ); + assertBusy(() -> { + // Wait for the task to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + equalTo(1) + ); + }); TaskInfo firstRunningTask = client().admin() .cluster() @@ -400,15 +372,13 @@ public void testCreatePersistentTaskWithDuplicateId() throws Exception { ); logger.info("Waiting for persistent task with id {} to disappear", firstRunningTask.getId()); - assertBusy( - () -> { - // Wait for the task to disappear completely - assertThat( - client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + 
"[c]").get().getTasks(), - empty() - ); - } - ); + assertBusy(() -> { + // Wait for the task to disappear completely + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks(), + empty() + ); + }); } public void testUnassignRunningPersistentTask() throws Exception { @@ -489,21 +459,13 @@ private void stopOrCancelTask(TaskId taskId) { } private static void waitForTaskToStart() throws Exception { - assertBusy( - () -> { - // Wait for the task to start - assertThat( - client().admin() - .cluster() - .prepareListTasks() - .setActions(TestPersistentTasksExecutor.NAME + "[c]") - .get() - .getTasks() - .size(), - equalTo(1) - ); - } - ); + assertBusy(() -> { + // Wait for the task to start + assertThat( + client().admin().cluster().prepareListTasks().setActions(TestPersistentTasksExecutor.NAME + "[c]").get().getTasks().size(), + equalTo(1) + ); + }); } private static void assertClusterStateHasTask(String taskId) { diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java index 4808371633cd0..15d1f3a0559a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java @@ -73,14 +73,16 @@ public void testFullRollingRestart() throws Exception { final String healthTimeout = "1m"; for (int i = 0; i < 1000; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) .execute() .actionGet(); } flush(); for (int i = 1000; i < 2000; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) 
.execute() .actionGet(); @@ -210,7 +212,8 @@ public void testNoRebalanceOnRollingRestart() throws Exception { ).get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Long.toString(i)) + client().prepareIndex("test") + .setId(Long.toString(i)) .setSource(MapBuilder.newMapBuilder().put("test", "value" + i).map()) .execute() .actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java index 3ada65909b72f..26b3e9ae336dc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java @@ -419,7 +419,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, ShardId docShard = clusterService.operationRouting().shardId(state, "test", id, null); if (docShard.id() == shard) { for (ShardRouting shardRouting : state.routingTable().shardRoutingTable("test", shard)) { - GetResponse response = client().prepareGet("test", "type", id) + GetResponse response = client().prepareGet("test", id) .setPreference("_only_nodes:" + shardRouting.currentNodeId()) .get(); if (response.isExists()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java index 6949409ae5f63..c5b0d99e6d275 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java @@ -150,13 +150,13 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + 
client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); for (int i = 10; i < 20; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> verifying count"); @@ -376,12 +376,12 @@ public void indexShardStateChanged( List builders1 = new ArrayList<>(); for (int numDocs = randomIntBetween(10, 30); numDocs > 0; numDocs--) { - builders1.add(client().prepareIndex("test", "type").setSource("{}", XContentType.JSON)); + builders1.add(client().prepareIndex("test").setSource("{}", XContentType.JSON)); } List builders2 = new ArrayList<>(); for (int numDocs = randomIntBetween(10, 30); numDocs > 0; numDocs--) { - builders2.add(client().prepareIndex("test", "type").setSource("{}", XContentType.JSON)); + builders2.add(client().prepareIndex("test").setSource("{}", XContentType.JSON)); } logger.info("--> START relocate the shard from {} to {}", nodes[fromNode], nodes[toNode]); @@ -441,7 +441,7 @@ public void testCancellationCleansTempFiles() throws Exception { List requests = new ArrayList<>(); int numDocs = scaledRandomIntBetween(25, 250); for (int i = 0; i < numDocs; i++) { - requests.add(client().prepareIndex(indexName, "type").setSource("{}", XContentType.JSON)); + requests.add(client().prepareIndex(indexName).setSource("{}", XContentType.JSON)); } indexRandom(true, requests); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("3").setWaitForGreenStatus().get().isTimedOut()); @@ -560,7 +560,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { for (int i = 0; i < numDocs; i++) { String id = 
randomRealisticUnicodeOfLength(10) + String.valueOf(i); ids.add(id); - docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i)); + docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i)); } indexRandom(true, docs); SearchResponse countResponse = client().prepareSearch("test").get(); @@ -578,7 +578,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { for (int i = 0; i < numDocs; i++) { String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i); ids.add(id); - docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i)); + docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(numDocs + i)); } indexRandom(true, docs); @@ -614,13 +614,14 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); for (int i = 10; i < 20; i++) { - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute(); @@ -671,7 +672,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E logger.info("--> index 10 docs"); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + 
client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); } logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); @@ -679,7 +680,8 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E final List> pendingIndexResponses = new ArrayList<>(); for (int i = 10; i < 20; i++) { pendingIndexResponses.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute() @@ -706,7 +708,8 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E logger.info("--> index 100 docs while relocating"); for (int i = 20; i < 120; i++) { pendingIndexResponses.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) .setSource("field", "value" + i) .execute() @@ -810,8 +813,8 @@ public void sendRequest( if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) { // corrupting the segments_N files in order to make sure future recovery re-send files logger.debug("corrupting [{}] to {}. 
file name: [{}]", action, connection.getNode(), chunkRequest.name()); - assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content() - .toBytesRef().bytes : "no internal reference!!"; + assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes + : "no internal reference!!"; byte[] array = chunkRequest.content().toBytesRef().bytes; array[0] = (byte) ~array[0]; // flip one byte in the content corruptionCount.countDown(); diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/SimpleRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/SimpleRecoveryIT.java index cb80dddb81cb6..4ebb840c600d2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/SimpleRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/SimpleRecoveryIT.java @@ -67,12 +67,12 @@ public void testSimpleRecovery() throws Exception { NumShards numShards = getNumShards("test"); - client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); + client().index(indexRequest("test").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); FlushResponse flushResponse = client().admin().indices().flush(flushRequest("test")).actionGet(); assertThat(flushResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(flushResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(flushResponse.getFailedShards(), equalTo(0)); - client().index(indexRequest("test").type("type1").id("2").source(source("2", "test"), XContentType.JSON)).actionGet(); + client().index(indexRequest("test").id("2").source(source("2", "test"), XContentType.JSON)).actionGet(); RefreshResponse refreshResponse = client().admin().indices().refresh(refreshRequest("test")).actionGet(); assertThat(refreshResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(refreshResponse.getSuccessfulShards(), 
equalTo(numShards.numPrimaries)); @@ -86,13 +86,13 @@ public void testSimpleRecovery() throws Exception { GetResponse getResult; for (int i = 0; i < 5; i++) { - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("1", "test"))); - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("1", "test"))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("2", "test"))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("2", "test"))); } @@ -103,17 +103,17 @@ public void testSimpleRecovery() throws Exception { ensureGreen(); for (int i = 0; i < 5; i++) { - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("1", "test"))); - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("1", "test"))); - getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); + getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("1", "test"))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); 
assertThat(getResult.getSourceAsString(), equalTo(source("2", "test"))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("2", "test"))); - getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); + getResult = client().get(getRequest("test").id("2")).actionGet(); assertThat(getResult.getSourceAsString(), equalTo(source("2", "test"))); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java index 43f21d9397580..45fb1a8fc58c0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java @@ -122,7 +122,7 @@ public void testCancelRecoveryAndResume() throws Exception { List builder = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - builder.add(client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i), "the_id", id)); + builder.add(client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i), "the_id", id)); } indexRandom(true, builder); for (int i = 0; i < numDocs; i++) { diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java index 1c2bdf2e3a09b..eb929fd28d2ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/AliasResolveRoutingIT.java @@ -61,9 +61,9 @@ public void testSearchClosedWildcardIndex() throws ExecutionException, Interrupt client().admin().indices().prepareClose("test-1").get(); indexRandom( 
true, - client().prepareIndex("test-0", "type1", "1").setSource("field1", "the quick brown fox jumps"), - client().prepareIndex("test-0", "type1", "2").setSource("field1", "quick brown"), - client().prepareIndex("test-0", "type1", "3").setSource("field1", "quick") + client().prepareIndex("test-0").setId("1").setSource("field1", "the quick brown fox jumps"), + client().prepareIndex("test-0").setId("2").setSource("field1", "quick brown"), + client().prepareIndex("test-0").setId("3").setSource("field1", "quick") ); refresh("test-*"); assertHitCount( diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java index 3b05b6d3bb21b..274133c2c8239 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/AliasRoutingIT.java @@ -63,61 +63,61 @@ public void testAliasCrudRouting() throws Exception { assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("test").alias("alias0").routing("0"))); logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); + 
assertThat(client().prepareGet("test", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> verifying get with routing alias, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> updating with id [1] and routing through alias"); - client().prepareUpdate("alias0", "type1", "1") + client().prepareUpdate("alias0", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") .execute() .actionGet(); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(true)); assertThat( - client().prepareGet("alias0", "type1", "1").execute().actionGet().getSourceAsMap().get("field").toString(), + client().prepareGet("alias0", "1").execute().actionGet().getSourceAsMap().get("field").toString(), equalTo("value2") ); } logger.info("--> deleting with no routing, should not delete anything"); - client().prepareDelete("test", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", 
"1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> deleting with routing alias, should delete"); - client().prepareDelete("alias0", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("alias0", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); - assertThat(client().prepareGet("alias0", "type1", 
"1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(true)); } } @@ -134,14 +134,14 @@ public void testAliasSearchRouting() throws Exception { ); logger.info("--> indexing with id [1], and routing [0] using alias"); - client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias0", "1").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> search with no routing, should fine one"); @@ -245,7 +245,7 @@ public void testAliasSearchRouting() throws Exception { } logger.info("--> indexing with id [2], and routing [1] using alias"); - client().prepareIndex("alias1", "type1", "2").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias1").setId("2").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { @@ -491,25 +491,25 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { ); ensureGreen(); // wait for events again to make sure we got the aliases on all 
nodes logger.info("--> indexing with id [1], and routing [0] using alias to test-a"); - client().prepareIndex("alias-a0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias-a0").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test-a", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test-a", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("alias-a0", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias-a0", "1").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> indexing with id [0], and routing [1] using alias to test-b"); - client().prepareIndex("alias-b1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias-b1").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test-a", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test-a", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("alias-b1", "type1", "1").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("alias-b1", "1").execute().actionGet().isExists(), equalTo(true)); } logger.info("--> search with alias-a1,alias-b0, should not find"); @@ -594,9 +594,9 @@ public 
void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); - client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_1").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> indexing on index_2 which is a concrete index"); - client().prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { @@ -625,9 +625,9 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); - client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_1").setId("1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> indexing on index_2 which is a concrete index"); - client().prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index_*") .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -650,12 +650,12 @@ public void 
testIndexingAliasesOverTime() throws Exception { assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("test").alias("alias").routing("3"))); logger.info("--> indexing with id [0], and routing [3]"); - client().prepareIndex("alias", "type1", "0").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias").setId("0").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); assertThat( client().prepareSearch("alias") .setQuery(QueryBuilders.matchAllQuery()) @@ -712,13 +712,13 @@ public void testIndexingAliasesOverTime() throws Exception { ); logger.info("--> indexing with id [1], and routing [4]"); - client().prepareIndex("alias", "type1", "1").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("alias").setId("1").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with no routing, should not find anything"); logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); - assertThat(client().prepareGet("test", "type1", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").setRouting("4").execute().actionGet().isExists(), 
equalTo(true)); assertThat( client().prepareSearch("alias") .setQuery(QueryBuilders.matchAllQuery()) diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java index 8dd2bd7c1235e..a64e857f089f0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; import org.mockito.internal.util.collections.Sets; @@ -62,7 +61,7 @@ public void testVariousPartitionSizes() throws Exception { .put("index.number_of_routing_shards", shards) .put("index.routing_partition_size", partitionSize) ) - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); @@ -96,7 +95,7 @@ public void testShrinking() throws Exception { .put("index.number_of_replicas", numberOfReplicas()) .put("index.routing_partition_size", partitionSize) ) - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); @@ -231,7 +230,7 @@ private void verifyGets(String index, Map> routingToDocument String routing = routingEntry.getKey(); for (String id : routingEntry.getValue()) { - assertTrue(client().prepareGet(index, "type", id).setRouting(routing).execute().actionGet().isExists()); + assertTrue(client().prepareGet(index, id).setRouting(routing).execute().actionGet().isExists()); } } } @@ -249,7 +248,7 @@ 
private Map> generateRoutedDocumentIds(String index) { String id = routingValue + "_" + String.valueOf(k); routingToDocumentIds.get(routingValue).add(id); - client().prepareIndex(index, "type", id).setRouting(routingValue).setSource("foo", "bar").get(); + client().prepareIndex(index).setId(id).setRouting(routingValue).setSource("foo", "bar").get(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java index ae9f08f5c5489..f3179a0c4acb3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java @@ -93,47 +93,49 @@ public void testSimpleCrudRouting() throws Exception { ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } logger.info("--> deleting with no routing, should not delete anything"); - client().prepareDelete("test", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + 
client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } logger.info("--> deleting with routing, should delete"); - client().prepareDelete("test", "type1", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete("test", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); } logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; 
i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } } @@ -143,18 +145,19 @@ public void testSimpleSearchRouting() { String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "1").execute().actionGet().isExists(), equalTo(false)); } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } logger.info("--> search with no routing, should fine one"); @@ -217,7 +220,8 @@ public void testSimpleSearchRouting() { String secondRoutingValue = "1"; logger.info("--> indexing with id [{}], and routing [{}]", routingValue, secondRoutingValue); - client().prepareIndex("test", "type1", routingValue) + client().prepareIndex("test") + .setId(routingValue) .setRouting(secondRoutingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -364,7 +368,8 @@ public void testRequiredRoutingCrudApis() throws Exception { String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - 
client().prepareIndex(indexOrAlias(), "type1", "1") + client().prepareIndex(indexOrAlias()) + .setId("1") .setRouting(routingValue) .setSource("field", "value1") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -373,7 +378,7 @@ public void testRequiredRoutingCrudApis() throws Exception { logger.info("--> indexing with id [1], with no routing, should fail"); try { - client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1").get(); + client().prepareIndex(indexOrAlias()).setId("1").setSource("field", "value1").get(); fail("index with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); @@ -381,15 +386,12 @@ public void testRequiredRoutingCrudApis() throws Exception { logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), - equalTo(true) - ); + assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } logger.info("--> deleting with no routing, should fail"); try { - client().prepareDelete(indexOrAlias(), "type1", "1").get(); + client().prepareDelete(indexOrAlias(), "1").get(); fail("delete with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); @@ -397,61 +399,49 @@ public void testRequiredRoutingCrudApis() throws Exception { for (int i = 0; i < 5; i++) { try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); + client().prepareGet(indexOrAlias(), "1").execute().actionGet().isExists(); fail("get with missing routing when routing is required should fail"); } catch (RoutingMissingException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - 
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } - assertThat( - client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), - equalTo(true) - ); + assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); } try { - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") - .execute() - .actionGet(); + client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").execute().actionGet(); fail("update with missing routing when routing is required should fail"); } catch (OpenSearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); } - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setRouting(routingValue) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") - .get(); + client().prepareUpdate(indexOrAlias(), "1").setRouting(routingValue).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").get(); client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 5; i++) { try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); + client().prepareGet(indexOrAlias(), "1").execute().actionGet().isExists(); fail(); } catch (RoutingMissingException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } - GetResponse getResponse = client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet(); + GetResponse getResponse = client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).execute().actionGet(); assertThat(getResponse.isExists(), 
equalTo(true)); assertThat(getResponse.getSourceAsMap().get("field"), equalTo("value2")); } - client().prepareDelete(indexOrAlias(), "type1", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareDelete(indexOrAlias(), "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { try { - client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists(); + client().prepareGet(indexOrAlias(), "1").execute().actionGet().isExists(); fail(); } catch (RoutingMissingException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } - assertThat( - client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), - equalTo(false) - ); + assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); } } @@ -476,7 +466,7 @@ public void testRequiredRoutingBulk() throws Exception { ensureGreen(); { BulkResponse bulkResponse = client().prepareBulk() - .add(Requests.indexRequest(indexOrAlias()).type("type1").id("1").source(Requests.INDEX_CONTENT_TYPE, "field", "value")) + .add(Requests.indexRequest(indexOrAlias()).id("1").source(Requests.INDEX_CONTENT_TYPE, "field", "value")) .execute() .actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); @@ -487,19 +477,13 @@ public void testRequiredRoutingBulk() throws Exception { assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST)); assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class)); - assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for 
[test]/[type1]/[1]")); + assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]")); } } { BulkResponse bulkResponse = client().prepareBulk() - .add( - Requests.indexRequest(indexOrAlias()) - .type("type1") - .id("1") - .routing("0") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value") - ) + .add(Requests.indexRequest(indexOrAlias()).id("1").routing("0").source(Requests.INDEX_CONTENT_TYPE, "field", "value")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); @@ -507,7 +491,7 @@ public void testRequiredRoutingBulk() throws Exception { { BulkResponse bulkResponse = client().prepareBulk() - .add(new UpdateRequest(indexOrAlias(), "type1", "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2")) + .add(new UpdateRequest(indexOrAlias(), "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2")) .execute() .actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); @@ -518,23 +502,20 @@ public void testRequiredRoutingBulk() throws Exception { assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST)); assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class)); - assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[type1]/[1]")); + assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]")); } } { BulkResponse bulkResponse = client().prepareBulk() - .add(new UpdateRequest(indexOrAlias(), "type1", "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2").routing("0")) + .add(new UpdateRequest(indexOrAlias(), "1").doc(Requests.INDEX_CONTENT_TYPE, "field", "value2").routing("0")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); } { - BulkResponse bulkResponse = client().prepareBulk() - 
.add(Requests.deleteRequest(indexOrAlias()).type("type1").id("1")) - .execute() - .actionGet(); + BulkResponse bulkResponse = client().prepareBulk().add(Requests.deleteRequest(indexOrAlias()).id("1")).execute().actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -543,13 +524,13 @@ public void testRequiredRoutingBulk() throws Exception { assertThat(bulkItemResponse.getOpType(), equalTo(DocWriteRequest.OpType.DELETE)); assertThat(bulkItemResponse.getFailure().getStatus(), equalTo(RestStatus.BAD_REQUEST)); assertThat(bulkItemResponse.getFailure().getCause(), instanceOf(RoutingMissingException.class)); - assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[type1]/[1]")); + assertThat(bulkItemResponse.getFailureMessage(), containsString("routing is required for [test]/[1]")); } } { BulkResponse bulkResponse = client().prepareBulk() - .add(Requests.deleteRequest(indexOrAlias()).type("type1").id("1").routing("0")) + .add(Requests.deleteRequest(indexOrAlias()).id("1").routing("0")) .execute() .actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); @@ -579,30 +560,28 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex(indexOrAlias(), "type1", "1").setRouting(routingValue).setSource("field", "value1").get(); + client().prepareIndex(indexOrAlias()).setId("1").setRouting(routingValue).setSource("field", "value1").get(); logger.info("--> indexing with id [2], and routing [{}]", routingValue); - client().prepareIndex(indexOrAlias(), "type1", "2") + client().prepareIndex(indexOrAlias()) + .setId("2") .setRouting(routingValue) .setSource("field", "value2") .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with id [1] with 
routing [0], should succeed"); - assertThat( - client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), - equalTo(true) - ); + assertThat(client().prepareGet(indexOrAlias(), "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); logger.info("--> verifying get with id [1], with no routing, should fail"); try { - client().prepareGet(indexOrAlias(), "type1", "1").get(); + client().prepareGet(indexOrAlias(), "1").get(); fail(); } catch (RoutingMissingException e) { - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } logger.info("--> verifying explain with id [2], with routing [0], should succeed"); - ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "type1", "2") + ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "2") .setQuery(QueryBuilders.matchAllQuery()) .setRouting(routingValue) .get(); @@ -611,25 +590,25 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { logger.info("--> verifying explain with id [2], with no routing, should fail"); try { - client().prepareExplain(indexOrAlias(), "type1", "2").setQuery(QueryBuilders.matchAllQuery()).get(); + client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get(); fail(); } catch (RoutingMissingException e) { - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[2]")); } logger.info("--> verifying term vector with id [1], with routing [0], should succeed"); - TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1").setRouting(routingValue).get(); + TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "1").setRouting(routingValue).get(); 
assertThat(termVectorsResponse.isExists(), equalTo(true)); assertThat(termVectorsResponse.getId(), equalTo("1")); try { - client().prepareTermVectors(indexOrAlias(), "type1", "1").get(); + client().prepareTermVectors(indexOrAlias(), "1").get(); fail(); } catch (RoutingMissingException e) { - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setRouting(routingValue) .setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1") .get(); @@ -637,16 +616,16 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(updateResponse.getVersion(), equalTo(2L)); try { - client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); + client().prepareUpdate(indexOrAlias(), "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); fail(); } catch (RoutingMissingException e) { - assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(e.getMessage(), equalTo("routing is required for [test]/[1]")); } logger.info("--> verifying mget with ids [1,2], with routing [0], should succeed"); MultiGetResponse multiGetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").routing("0")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").routing("0")) + .add(new MultiGetRequest.Item(indexOrAlias(), "1").routing("0")) + .add(new MultiGetRequest.Item(indexOrAlias(), "2").routing("0")) .get(); assertThat(multiGetResponse.getResponses().length, equalTo(2)); assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(false)); @@ -656,20 +635,20 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { logger.info("--> 
verifying mget with ids [1,2], with no routing, should fail"); multiGetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2")) + .add(new MultiGetRequest.Item(indexOrAlias(), "1")) + .add(new MultiGetRequest.Item(indexOrAlias(), "2")) .get(); assertThat(multiGetResponse.getResponses().length, equalTo(2)); assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(true)); assertThat(multiGetResponse.getResponses()[0].getFailure().getId(), equalTo("1")); - assertThat(multiGetResponse.getResponses()[0].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(multiGetResponse.getResponses()[0].getFailure().getMessage(), equalTo("routing is required for [test]/[1]")); assertThat(multiGetResponse.getResponses()[1].isFailed(), equalTo(true)); assertThat(multiGetResponse.getResponses()[1].getFailure().getId(), equalTo("2")); - assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); + assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[2]")); MultiTermVectorsResponse multiTermVectorsResponse = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").routing(routingValue)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").routing(routingValue)) + .add(new TermVectorsRequest(indexOrAlias(), "1").routing(routingValue)) + .add(new TermVectorsRequest(indexOrAlias(), "2").routing(routingValue)) .get(); assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2)); assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1")); @@ -682,15 +661,15 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().isExists(), equalTo(true)); 
multiTermVectorsResponse = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1")) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2")) + .add(new TermVectorsRequest(indexOrAlias(), "1")) + .add(new TermVectorsRequest(indexOrAlias(), "2")) .get(); assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2)); assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1")); assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true)); assertThat( multiTermVectorsResponse.getResponses()[0].getFailure().getCause().getMessage(), - equalTo("routing is required for [test]/[type1]/[1]") + equalTo("routing is required for [test]/[1]") ); assertThat(multiTermVectorsResponse.getResponses()[0].getResponse(), nullValue()); assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2")); @@ -698,7 +677,7 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(), nullValue()); assertThat( multiTermVectorsResponse.getResponses()[1].getFailure().getCause().getMessage(), - equalTo("routing is required for [test]/[type1]/[2]") + equalTo("routing is required for [test]/[2]") ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java index 8c4714aaf91f9..b4823bb482bfa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java @@ -90,17 +90,14 @@ protected Collection> nodePlugins() { } public void testPainlessCompilationLimit429Error() throws Exception { - client().prepareIndex("test", "1") - .setId("1") - .setSource(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) - .get(); + 
client().prepareIndex("test").setId("1").setSource(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()).get(); ensureGreen(); Map params = new HashMap<>(); params.put("field", "field"); Script script = new Script(ScriptType.INLINE, "mockscript", "increase_field", params); ExecutionException exception = expectThrows( ExecutionException.class, - () -> client().prepareUpdate("test", "1", "1").setScript(script).execute().get() + () -> client().prepareUpdate("test", "1").setScript(script).execute().get() ); Throwable rootCause = getRootCause(exception); assertTrue(rootCause instanceof OpenSearchException); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java index 37e7e10968983..da5698918cf99 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java @@ -113,7 +113,7 @@ private void indexTestData() { // Make sure we have a few segments BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int j = 0; j < 20; j++) { - bulkRequestBuilder.add(client().prepareIndex("test", "type", Integer.toString(i * 5 + j)).setSource("field", "value")); + bulkRequestBuilder.add(client().prepareIndex("test").setId(Integer.toString(i * 5 + j)).setSource("field", "value")); } assertNoFailures(bulkRequestBuilder.get()); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index 3c0b948adfb9b..049dcb50024ba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -68,7 +68,7 @@ protected Settings 
nodeSettings(int nodeOrdinal) { public void testSimpleTimeout() throws Exception { for (int i = 0; i < 32; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } refresh("test"); @@ -81,7 +81,7 @@ public void testSimpleTimeout() throws Exception { } public void testPartialResultsIntolerantTimeout() throws Exception { - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); OpenSearchException ex = expectThrows( OpenSearchException.class, diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java index 15892b6378d42..30e6aa4cd31fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java @@ -61,7 +61,7 @@ public void testOpenContextsAfterRejections() throws Exception { ensureGreen("test"); final int docs = scaledRandomIntBetween(20, 50); for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().get(); assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java index 03a5c4d71da4d..96f3f710e4b7d 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java @@ -63,7 +63,7 @@ public void testStressReaper() throws ExecutionException, InterruptedException { int num = randomIntBetween(100, 150); IndexRequestBuilder[] builders = new IndexRequestBuilder[num]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test", "type", "" + i).setSource("f", English.intToEnglish(i)); + builders[i] = client().prepareIndex("test").setId("" + i).setSource("f", English.intToEnglish(i)); } createIndex("test"); indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index 3af781d69efdc..303b84151cf3e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -56,7 +56,7 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(1, 20); List docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - docs.add(client().prepareIndex("index", "type").setSource("f", Integer.toString(i / 3))); + docs.add(client().prepareIndex("index").setSource("f", Integer.toString(i / 3))); } indexRandom(true, docs); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java index a786a59a5a9c1..f49938fb27e72 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java @@ -73,11 +73,11 @@ public void 
testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t String name = "name_" + randomIntBetween(1, 10); if (rarely()) { missingValues++; - builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder().startObject().field("name", name).endObject()); + builders[i] = client().prepareIndex("idx").setSource(jsonBuilder().startObject().field("name", name).endObject()); } else { int value = randomIntBetween(1, 10); values.put(value, values.getOrDefault(value, 0) + 1); - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field("name", name).field("value", value).endObject()); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java index 96cec5257ada8..29c325d01492e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java @@ -152,7 +152,7 @@ public void testRandomRanges() throws Exception { source = source.value(docs[i][j]); } source = source.endArray().endObject(); - client().prepareIndex("idx", "type").setSource(source).get(); + client().prepareIndex("idx").setSource(source).get(); } assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get()); @@ -283,7 +283,7 @@ public void testDuelTerms() throws Exception { source = source.value(Integer.toString(values[j])); } source = source.endArray().endObject(); - indexingRequests.add(client().prepareIndex("idx", "type").setSource(source)); + indexingRequests.add(client().prepareIndex("idx").setSource(source)); } indexRandom(true, indexingRequests); @@ -387,7 +387,7 @@ public void testDuelTermsHistogram() throws Exception { source = source.value(randomFrom(values)); } source = 
source.endArray().endObject(); - client().prepareIndex("idx", "type").setSource(source).get(); + client().prepareIndex("idx").setSource(source).get(); } assertNoFailures( client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get() @@ -439,7 +439,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { logger.info("Indexing [{}] docs", numDocs); List indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource("double_value", randomDouble())); + indexingRequests.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource("double_value", randomDouble())); } indexRandom(true, indexingRequests); @@ -458,7 +458,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { public void testReduce() throws Exception { createIndex("idx"); final int value = randomIntBetween(0, 10); - indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value)); + indexRandom(true, client().prepareIndex("idx").setSource("f", value)); SearchResponse response = client().prepareSearch("idx") .addAggregation( filter("filter", QueryBuilders.matchAllQuery()).subAggregation( @@ -518,7 +518,7 @@ public void testDuelDepthBreadthFirst() throws Exception { final int v1 = randomInt(1 << randomInt(7)); final int v2 = randomInt(1 << randomInt(7)); final int v3 = randomInt(1 << randomInt(7)); - reqs.add(client().prepareIndex("idx", "type").setSource("f1", v1, "f2", v2, "f3", v3)); + reqs.add(client().prepareIndex("idx").setSource("f1", v1, "f2", v2, "f3", v3)); } indexRandom(true, reqs); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java index 5086468b6a673..3b8431c50a3ee 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java @@ -52,9 +52,9 @@ public class FiltersAggsRewriteIT extends OpenSearchSingleNodeTestCase { public void testWrapperQueryIsRewritten() throws IOException { createIndex("test", Settings.EMPTY, "test", "title", "type=text"); - client().prepareIndex("test", "test", "1").setSource("title", "foo bar baz").get(); - client().prepareIndex("test", "test", "2").setSource("title", "foo foo foo").get(); - client().prepareIndex("test", "test", "3").setSource("title", "bar baz bax").get(); + client().prepareIndex("test").setId("1").setSource("title", "foo bar baz").get(); + client().prepareIndex("test").setId("2").setSource("title", "foo foo foo").get(); + client().prepareIndex("test").setId("3").setSource("title", "bar baz bax").get(); client().admin().indices().prepareRefresh("test").get(); XContentType xContentType = randomFrom(XContentType.values()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java index e1c339c38d1da..c00152a54bd37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java @@ -57,7 +57,7 @@ public void testMetadataSetOnAggregationResult() throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; for (int i = 0; i < builders.length; i++) { String name = "name_" + randomIntBetween(1, 10); - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field("name", name).field("value", randomInt()).endObject()); } indexRandom(true, builders); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java index a2831d9929f8b..9135ca0f0a364 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java @@ -72,8 +72,9 @@ protected void setupSuiteScopeCluster() throws Exception { ); indexRandom( true, - client().prepareIndex("idx", "type", "1").setSource(), - client().prepareIndex("idx", "type", "2") + client().prepareIndex("idx").setId("1").setSource(), + client().prepareIndex("idx") + .setId("2") .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java index 0298a39ac37fa..5d54359152816 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -92,19 +92,19 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numTag1Docs; i++) { numSingleTag1Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted // docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs; 
i < (numTag1Docs + numTag2Docs); i++) { numSingleTag2Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag2").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) { @@ -112,15 +112,16 @@ public void setupSuiteScopeCluster() throws Exception { numTag1Docs++; numTag2Docs++; XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).array("tag", "tag1", "tag2").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java index 60d2c3c05a569..fc5407c4cade8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java @@ -86,7 +86,7 @@ public void setupSuiteScopeCluster() throws 
Exception { default: throw new AssertionError(); } - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, singleValue) @@ -99,7 +99,6 @@ public void setupSuiteScopeCluster() throws Exception { public void testSingleValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) .get(); @@ -132,7 +131,6 @@ public void testSingleValueField() throws Exception { public void testMultiValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) .get(); @@ -165,7 +163,6 @@ public void testMultiValueField() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .setTypes("type") .addAggregation( terms("terms").field(SINGLE_VALUED_FIELD_NAME).size(between(1, 5)).collectMode(randomFrom(SubAggCollectionMode.values())) ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 8aa4d517d8f36..2c095857089e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.DateFieldMapper; import 
org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -113,7 +112,7 @@ private static String format(ZonedDateTime date, String pattern) { } private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx, "type") + return client().prepareIndex(idx) .setSource( jsonBuilder().startObject() .timeField("date", date) @@ -127,7 +126,7 @@ private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -149,7 +148,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -191,44 +191,44 @@ private void getMultiSortDocs(List builders) throws IOExcep assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", "date", "type=date").get()); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()) ); builders.add( - 
client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()) ); } @@ -1042,7 +1042,8 @@ public void testSingleValueWithTimeZone() throws Exception { IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2", "type", "" + i) + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(1); } @@ -1314,7 +1315,6 @@ public void 
testSingleValueFieldWithExtendedBoundsOffset() throws Exception { public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { String mappingJson = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("date") .field("type", "date") @@ -1322,12 +1322,12 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get(); + prepareCreate("idx2").setMapping(mappingJson).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2", "type", "" + i) + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().field("date", "10-03-2014").endObject()); } indexRandom(true, reqs); @@ -1397,8 +1397,8 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc assertAcked(client().admin().indices().prepareCreate("test9491").addMapping("type", "d", "type=date").get()); indexRandom( true, - client().prepareIndex("test9491", "type").setSource("d", "2014-10-08T13:00:00Z"), - client().prepareIndex("test9491", "type").setSource("d", "2014-11-08T13:00:00Z") + client().prepareIndex("test9491").setSource("d", "2014-10-08T13:00:00Z"), + client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z") ); ensureSearchable("test9491"); SearchResponse response = client().prepareSearch("test9491") @@ -1420,9 +1420,9 @@ public void testIssue8209() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test8209").addMapping("type", "d", "type=date").get()); indexRandom( true, - client().prepareIndex("test8209", "type").setSource("d", "2014-01-01T00:00:00Z"), - client().prepareIndex("test8209", "type").setSource("d", "2014-04-01T00:00:00Z"), - 
client().prepareIndex("test8209", "type").setSource("d", "2014-04-30T00:00:00Z") + client().prepareIndex("test8209").setSource("d", "2014-01-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z") ); ensureSearchable("test8209"); SearchResponse response = client().prepareSearch("test8209") @@ -1471,7 +1471,7 @@ public void testExceptionOnNegativeInterval() { */ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionException { String indexDateUnmapped = "test31760"; - indexRandom(true, client().prepareIndex(indexDateUnmapped, "_doc").setSource("foo", "bar")); + indexRandom(true, client().prepareIndex(indexDateUnmapped).setSource("foo", "bar")); ensureSearchable(indexDateUnmapped); SearchResponse response = client().prepareSearch(indexDateUnmapped) @@ -1499,7 +1499,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { String index = "test31392"; assertAcked(client().admin().indices().prepareCreate(index).addMapping("type", "d", "type=date,format=epoch_millis").get()); - indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000")); + indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000")); ensureSearchable(index); SearchResponse response = client().prepareSearch(index) .addAggregation( @@ -1616,8 +1616,8 @@ public void testScriptCaching() throws Exception { String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1)); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date), - client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2) + client().prepareIndex("cache_test_idx").setId("1").setSource("d", date), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", date2) ); 
// Make sure we are starting with a clear cache @@ -1788,7 +1788,6 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) { ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new); SearchResponse response = client().prepareSearch("sort_idx") - .setTypes("type") .addAggregation( dateHistogram("histo").field("date") .dateHistogramInterval(DateHistogramInterval.DAY) @@ -1830,8 +1829,8 @@ private ZonedDateTime key(Histogram.Bucket bucket) { */ public void testDateNanosHistogram() throws Exception { assertAcked(prepareCreate("nanos").addMapping("_doc", "date", "type=date_nanos").get()); - indexRandom(true, client().prepareIndex("nanos", "_doc", "1").setSource("date", "2000-01-01")); - indexRandom(true, client().prepareIndex("nanos", "_doc", "2").setSource("date", "2000-01-02")); + indexRandom(true, client().prepareIndex("nanos").setId("1").setSource("date", "2000-01-01")); + indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02")); // Search interval 24 hours SearchResponse r = client().prepareSearch("nanos") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 5334709e60cd5..2505cb48245c3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -85,7 +85,8 @@ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, i IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours]; for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) { - reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", 
"" + i) + reqs[i - idxIdStart] = client().prepareIndex("idx2") + .setId("" + i) .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(stepSizeHours); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java index 9c6ba1495a89c..7a28df00980cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java @@ -79,7 +79,7 @@ public class DateRangeIT extends OpenSearchIntegTestCase { private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -128,7 +128,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer")); for (int i = 0; i < 2; i++) { docs.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -918,9 +919,11 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1") + client().prepareIndex("cache_test_idx") + .setId("1") .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()), - client().prepareIndex("cache_test_idx", "type", "2") + client().prepareIndex("cache_test_idx") + .setId("2") .setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject()) ); @@ -1070,9 +1073,9 @@ public void testRangeWithFormatStringValue() throws Exception { assertAcked(prepareCreate(indexName).addMapping("type", 
"date", "type=date,format=strict_hour_minute_second")); indexRandom( true, - client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), - client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), - client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject()) + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject()) ); // using no format should work when to/from is compatible with format in @@ -1132,9 +1135,9 @@ public void testRangeWithFormatNumericValue() throws Exception { assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second")); indexRandom( true, - client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), - client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), - client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()) + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()) ); // using no format should work when to/from is compatible with format in diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java index b2e24e73c2790..aa4bb671d14e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -106,10 +106,12 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test", "book", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); - client().prepareIndex("idx_unmapped_author", "book", "" + i) + client().prepareIndex("idx_unmapped_author") + .setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); } @@ -121,7 +123,6 @@ public void testIssue10719() throws Exception { // statement boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("test") - .setTypes("book") .setSearchType(SearchType.QUERY_THEN_FETCH) .addAggregation( terms("genres").field("genre") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java index cdfab74c58774..6c6e6ccc679e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java @@ -147,7 +147,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < NUM_DOCS; i++) { builders.add( - client().prepareIndex("idx", "type") 
+ client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, (double) i) @@ -164,7 +164,7 @@ public void setupSuiteScopeCluster() throws Exception { } for (int i = 0; i < 100; i++) { builders.add( - client().prepareIndex("high_card_idx", "type") + client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, (double) i) @@ -181,7 +181,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -240,44 +241,44 @@ private void getMultiSortDocs(List builders) throws IOExcep assertAcked(prepareCreate("sort_idx").addMapping("multi_sort_type", SINGLE_VALUED_FIELD_NAME, "type=double")); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 2).endObject()) ); builders.add( - 
client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -938,7 +939,6 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... 
order) { SearchResponse response = client().prepareSearch("sort_idx") - .setTypes("multi_sort_type") .addAggregation( terms("terms").field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -986,8 +986,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1.5), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2.5) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1.5), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2.5) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java index 5af682ac1b68c..b938db8891d7b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java @@ -73,7 +73,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numTag1Docs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject()) ); } @@ -83,16 +84,17 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag2") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" 
+ i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java index 4ab8e725551af..0b895f32a1259 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java @@ -80,10 +80,10 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numTag1Docs; i++) { XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs; i < (numTag1Docs + numTag2Docs); i++) { @@ -92,9 +92,9 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag2") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + 
builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) { @@ -104,15 +104,16 @@ public void setupSuiteScopeCluster() throws Exception { .field("tag", "tag3") .field("name", "name" + i) .endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java index e11eca4690234..a2d6533ae0afb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java @@ -87,7 +87,7 @@ private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons } source.endArray(); source = source.endObject(); - return client().prepareIndex(idx, "type").setSource(source); + return client().prepareIndex(idx).setSource(source); } @Override @@ -142,7 +142,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).field("location", "52.0945, 5.116").endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java index 3331748d48fd5..c7c21c203af61 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java @@ -88,7 +88,7 @@ private static IndexRequestBuilder indexCity(String index, String name, List builders) throws IOExcep assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=double").get()); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - 
client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3.8).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4.4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5.1).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -1126,8 +1127,8 @@ public void testDecimalIntervalAndOffset() throws Exception { assertAcked(prepareCreate("decimal_values").addMapping("type", "d", "type=float").get()); indexRandom( true, - client().prepareIndex("decimal_values", "type", "1").setSource("d", -0.6), - client().prepareIndex("decimal_values", "type", "2").setSource("d", 0.1) + client().prepareIndex("decimal_values").setId("1").setSource("d", -0.6), + client().prepareIndex("decimal_values").setId("2").setSource("d", 0.1) ); SearchResponse r = 
client().prepareSearch("decimal_values") @@ -1156,8 +1157,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("d", -0.6), - client().prepareIndex("cache_test_idx", "type", "2").setSource("d", 0.1) + client().prepareIndex("cache_test_idx").setId("1").setSource("d", -0.6), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", 0.1) ); // Make sure we are starting with a clear cache @@ -1351,9 +1352,9 @@ public void testHardBounds() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "d", "type=double").get()); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("d", -0.6), - client().prepareIndex("test", "type", "2").setSource("d", 0.5), - client().prepareIndex("test", "type", "3").setSource("d", 0.1) + client().prepareIndex("test").setId("1").setSource("d", -0.6), + client().prepareIndex("test").setId("2").setSource("d", 0.5), + client().prepareIndex("test").setId("3").setSource("d", 0.1) ); SearchResponse r = client().prepareSearch("test") @@ -1391,7 +1392,6 @@ public void testHardBounds() throws Exception { private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... 
order) { SearchResponse response = client().prepareSearch("sort_idx") - .setTypes("type") .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(1) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java index a13017b130c4e..b768631225b90 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java @@ -75,9 +75,10 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom( true, - client().prepareIndex("idx", "type", "1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), - client().prepareIndex("idx", "type", "2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), - client().prepareIndex("idx", "type", "3") + client().prepareIndex("idx").setId("1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), + client().prepareIndex("idx").setId("2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), + client().prepareIndex("idx") + .setId("3") .setSource("ip", "2001:db8::ff00:42:8329", "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380")) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java index 6d5e75f613649..53ff70dd240d1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java @@ -79,9 +79,9 @@ public void testScriptValue() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", 
"type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip'].value", Collections.emptyMap()); @@ -107,9 +107,9 @@ public void testScriptValues() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip']", Collections.emptyMap()); @@ -135,10 +135,10 @@ public void testMissingValue() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "127.0.0.1"), - client().prepareIndex("index", "type", "4").setSource("not_ip", "something") + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + 
client().prepareIndex("index").setId("3").setSource("ip", "127.0.0.1"), + client().prepareIndex("index").setId("4").setSource("not_ip", "something") ); SearchResponse response = client().prepareSearch("index") .addAggregation(AggregationBuilders.terms("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint())) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java index d9c910eb0979b..115b30643ff21 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java @@ -133,7 +133,7 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx", "high_card_idx"); IndexRequestBuilder[] lowCardBuilders = new IndexRequestBuilder[NUM_DOCS]; for (int i = 0; i < lowCardBuilders.length; i++) { - lowCardBuilders[i] = client().prepareIndex("idx", "type") + lowCardBuilders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i) @@ -149,7 +149,7 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, lowCardBuilders); IndexRequestBuilder[] highCardBuilders = new IndexRequestBuilder[100]; // TODO randomize the size? 
for (int i = 0; i < highCardBuilders.length; i++) { - highCardBuilders[i] = client().prepareIndex("high_card_idx", "type") + highCardBuilders[i] = client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i) @@ -168,7 +168,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -227,44 +228,44 @@ private void getMultiSortDocs(List builders) throws IOExcep createIndex("sort_idx"); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") 
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "multi_sort_type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) ); } @@ -886,7 +887,6 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... 
order) { SearchResponse response = client().prepareSearch("sort_idx") - .setTypes("multi_sort_type") .addAggregation( terms("terms").field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -934,8 +934,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java index f5507b17049e6..47cddbf856090 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java @@ -139,7 +139,7 @@ public void setupSuiteScopeCluster() throws Exception { final int frequency = randomBoolean() ? 
1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { indexRequests.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("s", stringTerm) @@ -332,7 +332,6 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception { // all terms final SearchResponse allTermsResponse = client().prepareSearch("idx") - .setTypes("type") .setSize(0) .setQuery(QUERY) .addAggregation( @@ -352,7 +351,6 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord for (long minDocCount = 0; minDocCount < 20; ++minDocCount) { final int size = randomIntBetween(1, cardinality + 2); final SearchRequest request = client().prepareSearch("idx") - .setTypes("type") .setSize(0) .setQuery(QUERY) .addAggregation( @@ -407,7 +405,6 @@ public void testDateHistogramKeyDesc() throws Exception { private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { final int interval = randomIntBetween(1, 3); final SearchResponse allResponse = client().prepareSearch("idx") - .setTypes("type") .setSize(0) .setQuery(QUERY) .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0)) @@ -417,7 +414,6 @@ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { final SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setSize(0) .setQuery(QUERY) .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount)) @@ -428,7 +424,6 @@ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception { final SearchResponse allResponse = client().prepareSearch("idx") - .setTypes("type") 
.setSize(0) .setQuery(QUERY) .addAggregation( @@ -440,7 +435,6 @@ private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { final SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setSize(0) .setQuery(QUERY) .addAggregation( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java index de6db070e1a89..f03a3bdeb1716 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java @@ -143,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception { if (randomBoolean()) { source.field("numeric_value", randomDouble()); } - client().prepareIndex("idx", "type").setSource(source.endObject()).get(); + client().prepareIndex("idx").setSource(source.endObject()).get(); } refresh(); ensureSearchable(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java index 17e894b411a2a..256281f8c6833 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java @@ -123,14 +123,15 @@ public void setupSuiteScopeCluster() throws Exception { source = source.startObject().field("value", i + 1 + j).endObject(); } source = source.endArray().endObject(); - builders.add(client().prepareIndex("idx", "type", "" + i + 1).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i + 1).setSource(source)); } prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "nested", 
"type=nested").get(); ensureGreen("empty_bucket_idx"); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource( jsonBuilder().startObject() .field("value", i * 2) @@ -178,7 +179,8 @@ public void setupSuiteScopeCluster() throws Exception { ensureGreen("idx_nested_nested_aggs"); builders.add( - client().prepareIndex("idx_nested_nested_aggs", "type", "1") + client().prepareIndex("idx_nested_nested_aggs") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("nested1") @@ -458,7 +460,8 @@ public void testParentFilterResolvedCorrectly() throws Exception { List indexRequests = new ArrayList<>(2); indexRequests.add( - client().prepareIndex("idx2", "provider", "1") + client().prepareIndex("idx2") + .setId("1") .setSource( "{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, " + "\"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [" @@ -467,7 +470,8 @@ public void testParentFilterResolvedCorrectly() throws Exception { ) ); indexRequests.add( - client().prepareIndex("idx2", "provider", "2") + client().prepareIndex("idx2") + .setId("2") .setSource( "{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, " + "\"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [" @@ -478,7 +482,6 @@ public void testParentFilterResolvedCorrectly() throws Exception { indexRandom(true, indexRequests); SearchResponse response = client().prepareSearch("idx2") - .setTypes("provider") .addAggregation( terms("startDate").field("dates.month.start") .subAggregation( @@ -545,7 +548,8 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { ); ensureGreen("idx4"); - client().prepareIndex("idx4", "product", "1") + client().prepareIndex("idx4") + .setId("1") 
.setSource( jsonBuilder().startObject() .field("name", "product1") @@ -564,7 +568,8 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { .endObject() ) .get(); - client().prepareIndex("idx4", "product", "2") + client().prepareIndex("idx4") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "product2") @@ -586,7 +591,6 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { refresh(); SearchResponse response = client().prepareSearch("idx4") - .setTypes("product") .addAggregation( terms("category").field("categories") .subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id"))) @@ -681,7 +685,8 @@ public void testFilterAggInsideNestedAgg() throws Exception { ) ); - client().prepareIndex("classes", "class", "1") + client().prepareIndex("classes") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "QueryBuilder") @@ -720,7 +725,8 @@ public void testFilterAggInsideNestedAgg() throws Exception { .endObject() ) .get(); - client().prepareIndex("classes", "class", "2") + client().prepareIndex("classes") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Document") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java index 1b86f8fec9994..bfbfc53ed7e76 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java @@ -123,7 +123,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, i + 1) @@ -139,7 +139,8 @@ public void 
setupSuiteScopeCluster() throws Exception { prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource( jsonBuilder().startObject() // shift sequence by 1, to ensure we have negative values, and value 3 on the edge of the tested ranges @@ -154,10 +155,10 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("old_index").addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance").get(); prepareCreate("new_index").addMapping("_doc", "route_length_miles", "type=double").get(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); - builders.add(client().prepareIndex("new_index", "_doc").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("new_index").setSource(Collections.emptyMap())); indexRandom(true, builders); ensureSearchable(); @@ -936,8 +937,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), - client().prepareIndex("cache_test_idx", "type", "2").setSource(jsonBuilder().startObject().field("i", 2).endObject()) + client().prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), + 
client().prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().field("i", 2).endObject()) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java index fc60620345d0f..e8a57ea3941ff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java @@ -156,7 +156,7 @@ private void insertIdx1(List values1, List values2) throws Excep source.startObject().field("field2", value1).endObject(); } source.endArray().endObject(); - indexRandom(false, client().prepareIndex("idx1", "type").setRouting("1").setSource(source)); + indexRandom(false, client().prepareIndex("idx1").setRouting("1").setSource(source)); } private void insertIdx2(String[][] values) throws Exception { @@ -169,7 +169,7 @@ private void insertIdx2(String[][] values) throws Exception { source.endArray().endObject(); } source.endArray().endObject(); - indexRandom(false, client().prepareIndex("idx2", "type").setRouting("1").setSource(source)); + indexRandom(false, client().prepareIndex("idx2").setRouting("1").setSource(source)); } public void testSimpleReverseNestedToRoot() throws Exception { @@ -569,7 +569,8 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { .addMapping("product", mapping) ); - client().prepareIndex("idx3", "product", "1") + client().prepareIndex("idx3") + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java index 2f586f2b9e788..94204b6519374 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java @@ -105,10 +105,12 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test", "book", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); - client().prepareIndex("idx_unmapped_author", "book", "" + i) + client().prepareIndex("idx_unmapped_author") + .setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); } @@ -120,7 +122,6 @@ public void testIssue10719() throws Exception { // statement boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("test") - .setTypes("book") .setSearchType(SearchType.QUERY_THEN_FETCH) .addAggregation( terms("genres").field("genre") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java index 209da7c978b3c..2300e42b84bbc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java @@ -74,7 +74,7 @@ public class ShardReduceIT extends OpenSearchIntegTestCase { private IndexRequestBuilder indexDoc(String date, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java index 493dae383eab5..8f3d94c2eacdb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -51,7 +51,6 @@ public void testNoShardSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) @@ -76,7 +75,6 @@ public void testShardSizeEqualsSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -106,7 +104,6 @@ public void testWithShardSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -136,7 +133,6 @@ public void testWithShardSizeStringSingleShard() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( @@ -166,7 +162,6 @@ public void testNoShardSizeTermOrderString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) @@ -191,7 +186,6 @@ public void testNoShardSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( 
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) @@ -216,7 +210,6 @@ public void testShardSizeEqualsSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -245,7 +238,6 @@ public void testWithShardSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -275,7 +267,6 @@ public void testWithShardSizeLongSingleShard() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( @@ -305,7 +296,6 @@ public void testNoShardSizeTermOrderLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) @@ -330,7 +320,6 @@ public void testNoShardSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) @@ -355,7 +344,6 @@ public void testShardSizeEqualsSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -384,7 +372,6 @@ public void testWithShardSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -413,7 +400,6 
@@ public void testWithShardSizeDoubleSingleShard() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( @@ -443,7 +429,6 @@ public void testNoShardSizeTermOrderDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index c075035abfea9..cbcc9c396fc06 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -40,6 +40,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -92,7 +93,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase { static final String INDEX_NAME = "testidx"; - static final String DOC_TYPE = "_doc"; static final String TEXT_FIELD = "text"; static final String CLASS_FIELD = "class"; @@ -148,11 +148,9 @@ public void testXContentResponse() throws Exception { // Use significant_text on text fields but occasionally run with alternative of // significant_terms on legacy fieldData=true too. 
request = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD))); } else { request = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))); } @@ -224,10 +222,10 @@ public void testPopularTermManyDeletedDocs() throws Exception { String[] cat2v1 = { "constant", "two" }; String[] cat2v2 = { "constant", "duo" }; List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2").setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3").setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4").setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2").setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3").setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4").setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); indexRandom(true, false, indexRequestBuilderList); // Now create some holes in the index with selective deletes caused by updates. @@ -238,20 +236,18 @@ public void testPopularTermManyDeletedDocs() throws Exception { indexRequestBuilderList.clear(); for (int i = 0; i < 50; i++) { text = text == cat1v2 ? 
cat1v1 : cat1v2; - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1")); } indexRandom(true, false, indexRequestBuilderList); SearchRequestBuilder request; if (randomBoolean()) { request = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation( terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1)) ); } else { request = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1))); } @@ -282,7 +278,6 @@ public void testBackgroundVsSeparateSet( SearchRequestBuilder request1; if (useSigText) { request1 = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation( terms("class").field(CLASS_FIELD) .subAggregation( @@ -292,7 +287,6 @@ public void testBackgroundVsSeparateSet( ); } else { request1 = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation( terms("class").field(CLASS_FIELD) .subAggregation( @@ -309,7 +303,6 @@ public void testBackgroundVsSeparateSet( SearchRequestBuilder request2; if (useSigText) { request2 = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation( filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( significantText("sig_terms", TEXT_FIELD).minDocCount(1) @@ -326,7 +319,6 @@ public void testBackgroundVsSeparateSet( ); } else { request2 = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation( filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( significantTerms("sig_terms").field(TEXT_FIELD) @@ -487,7 +479,7 @@ private void indexEqualTestData() throws ExecutionException, InterruptedExceptio List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < 
data.length; i++) { String[] parts = data[i].split("\t"); - indexRequestBuilders.add(client().prepareIndex("test", "_doc", "" + i).setSource("class", parts[0], "text", parts[1])); + indexRequestBuilders.add(client().prepareIndex("test").setId("" + i).setSource("class", parts[0], "text", parts[1])); } indexRandom(true, false, indexRequestBuilders); } @@ -553,7 +545,9 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In if (type.equals("text")) { textMappings += ",fielddata=true"; } - assertAcked(prepareCreate(INDEX_NAME).addMapping(DOC_TYPE, TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword")); + assertAcked( + prepareCreate(INDEX_NAME).addMapping(MapperService.SINGLE_MAPPING_NAME, TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword") + ); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < randomInt(20); i++) { @@ -565,7 +559,7 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In text[0] = gb[randNum]; } indexRequestBuilderList.add( - client().prepareIndex(INDEX_NAME, DOC_TYPE).setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero") + client().prepareIndex(INDEX_NAME).setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? 
"one" : "zero") ); } indexRandom(true, indexRequestBuilderList); @@ -587,8 +581,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index aa2255435c8ff..9b941860177bb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; @@ -81,7 +80,8 @@ public void setupSuiteScopeCluster() throws Exception { int numUniqueTerms = between(2, numDocs / 2); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject() .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) @@ -97,7 +97,8 @@ public void setupSuiteScopeCluster() throws Exception { ); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx_single_shard", "type", "" + i) + 
client().prepareIndex("idx_single_shard") + .setId("" + i) .setSource( jsonBuilder().startObject() .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) @@ -108,16 +109,11 @@ public void setupSuiteScopeCluster() throws Exception { ); } numRoutingValues = between(1, 40); - assertAcked( - prepareCreate("idx_with_routing").addMapping( - "type", - "{ \"type\" : { \"_routing\" : { \"required\" : true } } }", - XContentType.JSON - ) - ); + assertAcked(prepareCreate("idx_with_routing").setMapping("{ \"_routing\" : { \"required\" : true } }")); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx_single_shard", "type", "" + i) + client().prepareIndex("idx_single_shard") + .setId("" + i) .setRouting(String.valueOf(randomInt(numRoutingValues))) .setSource( jsonBuilder().startObject() @@ -147,7 +143,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_0", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_0") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, term).endObject()) ); } @@ -172,7 +169,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_1", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_1") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, term).field("shard", 1).endObject()) ); } @@ -195,7 +193,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < entry.getValue(); i++) { String term = entry.getKey(); builders.add( - client().prepareIndex("idx_fixed_docs_2", "type", term + "-" + i) + client().prepareIndex("idx_fixed_docs_2") + .setId(term + "-" + i) .setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, 
term).field("shard", 2).endObject()) ); } @@ -303,7 +302,6 @@ public void testStringValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -317,7 +315,6 @@ public void testStringValueField() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -337,7 +334,6 @@ public void testStringValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -351,7 +347,6 @@ public void testStringValueFieldSingleShard() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -372,7 +367,6 @@ public void testStringValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setTypes("type") .setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) @@ -393,7 +387,6 @@ public void testStringValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( 
terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -408,7 +401,6 @@ public void testStringValueFieldDocCountAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -429,7 +421,6 @@ public void testStringValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -444,7 +435,6 @@ public void testStringValueFieldTermSortAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -465,7 +455,6 @@ public void testStringValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -480,7 +469,6 @@ public void testStringValueFieldTermSortDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -501,7 +489,6 @@ public void testStringValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - 
.setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -517,7 +504,6 @@ public void testStringValueFieldSubAggAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -539,7 +525,6 @@ public void testStringValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -555,7 +540,6 @@ public void testStringValueFieldSubAggDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -577,7 +561,6 @@ public void testLongValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -591,7 +574,6 @@ public void testLongValueField() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -611,7 +593,6 @@ public void testLongValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") 
.addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -625,7 +606,6 @@ public void testLongValueFieldSingleShard() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -646,7 +626,6 @@ public void testLongValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setTypes("type") .setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) @@ -667,7 +646,6 @@ public void testLongValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -682,7 +660,6 @@ public void testLongValueFieldDocCountAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -703,7 +680,6 @@ public void testLongValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -718,7 +694,6 @@ public void testLongValueFieldTermSortAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") 
.addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -739,7 +714,6 @@ public void testLongValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -754,7 +728,6 @@ public void testLongValueFieldTermSortDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -775,7 +748,6 @@ public void testLongValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -791,7 +763,6 @@ public void testLongValueFieldSubAggAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -813,7 +784,6 @@ public void testLongValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -829,7 +799,6 @@ public void testLongValueFieldSubAggDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") 
.addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -851,7 +820,6 @@ public void testDoubleValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -865,7 +833,6 @@ public void testDoubleValueField() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -885,7 +852,6 @@ public void testDoubleValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -899,7 +865,6 @@ public void testDoubleValueFieldSingleShard() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -920,7 +885,6 @@ public void testDoubleValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setTypes("type") .setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) @@ -941,7 +905,6 @@ public void testDoubleValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") 
- .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -956,7 +919,6 @@ public void testDoubleValueFieldDocCountAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -977,7 +939,6 @@ public void testDoubleValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -992,7 +953,6 @@ public void testDoubleValueFieldTermSortAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1013,7 +973,6 @@ public void testDoubleValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1028,7 +987,6 @@ public void testDoubleValueFieldTermSortDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1049,7 +1007,6 @@ public void testDoubleValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = 
client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1065,7 +1022,6 @@ public void testDoubleValueFieldSubAggAsc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1087,7 +1043,6 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1103,7 +1058,6 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -1128,7 +1082,6 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { */ public void testFixedDocs() throws Exception { SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 7d3a6ef7461a8..af006210326d8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -35,6 +35,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.filter.InternalFilter; @@ -57,7 +58,6 @@ public class TermsShardMinDocCountIT extends OpenSearchIntegTestCase { private static final String index = "someindex"; - private static final String type = "testtype"; private static String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); @@ -73,7 +73,7 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { } assertAcked( prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping(type, "text", textMappings) + .addMapping(MapperService.SINGLE_MAPPING_NAME, "text", textMappings) ); List indexBuilders = new ArrayList<>(); @@ -125,10 +125,10 @@ private void addTermsDocs(String term, int numInClass, int numNotInClass, List indexBuilders = new ArrayList<>(); @@ -189,7 +189,7 @@ public void testShardMinDocCountTermsTest() throws Exception { private static void addTermsDocs(String term, int numDocs, List builders) { String sourceClass = "{\"text\": \"" + term + "\"}"; for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex(index, type).setSource(sourceClass, XContentType.JSON)); + builders.add(client().prepareIndex(index).setSource(sourceClass, XContentType.JSON)); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index ead7a654baadf..252ffeb4ca0e7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -171,7 +171,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, "val" + i) @@ -206,7 +206,7 @@ public void setupSuiteScopeCluster() throws Exception { ); for (int i = 0; i < 100; i++) { builders.add( - client().prepareIndex("high_card_idx", "type") + client().prepareIndex("high_card_idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) @@ -222,7 +222,8 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -293,44 +294,44 @@ private void getMultiSortDocs(List builders) throws IOExcep ); for (int i = 1; i <= 3; i++) { builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject()) ); } builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") 
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject()) ); builders.add( - client().prepareIndex("sort_idx", "type") + client().prepareIndex("sort_idx") .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject()) ); } @@ -368,7 +369,6 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms SearchResponse allResponse = client().prepareSearch("idx") - .setTypes("type") 
.addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))) .get(); assertSearchResponse(allResponse); @@ -382,7 +382,6 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").field(field) .includeExclude(new IncludeExclude(partition, numPartitions)) @@ -402,7 +401,6 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -428,7 +426,6 @@ public void testSingleValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(MULTI_VALUED_FIELD_NAME) @@ -452,7 +449,6 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { public void testMultiValuedScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .script( @@ -488,7 +484,6 @@ public void testMultiValuedScript() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(MULTI_VALUED_FIELD_NAME) @@ -537,7 +532,6 @@ public void testScriptSingleValue() throws Exception { ); SearchResponse response = client().prepareSearch("idx") - 
.setTypes("type") .addAggregation( terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script) ) @@ -567,7 +561,6 @@ public void testScriptSingleValueExplicitSingleValue() throws Exception { ); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script) ) @@ -590,7 +583,6 @@ public void testScriptSingleValueExplicitSingleValue() throws Exception { public void testScriptMultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) .executionHint(randomExecutionHint()) @@ -626,7 +618,6 @@ public void testScriptMultiValued() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -652,7 +643,6 @@ public void testPartiallyUnmapped() throws Exception { public void testStringTermsNestedIntoPerBucketAggregator() throws Exception { // no execution hint so that the logic that decides whether or not to use ordinals is executed SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) @@ -681,7 +671,6 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -710,7 +699,6 @@ public void 
testSingleValuedFieldOrderedByIllegalAgg() throws Exception { public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("tags").executionHint(randomExecutionHint()) .field("tag") @@ -749,7 +737,6 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("tags").executionHint(randomExecutionHint()) .field("tag") @@ -813,7 +800,6 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("tags").executionHint(randomExecutionHint()) .field("tag") @@ -877,7 +863,6 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("tags").executionHint(randomExecutionHint()) .field("tag") @@ -936,7 +921,6 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -957,7 +941,6 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .setTypes("type") .addAggregation( 
terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -982,7 +965,6 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe for (String index : Arrays.asList("idx", "idx_unmapped")) { try { SearchResponse response = client().prepareSearch(index) - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1008,7 +990,6 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1033,7 +1014,6 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1066,7 +1046,6 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1100,7 +1079,6 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1134,7 +1112,6 @@ public void 
testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Ex public void testSingleValuedFieldOrderedByStatsAggAscWithTermsSubAgg() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) @@ -1253,7 +1230,6 @@ private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... order public void testIndexMetaField() throws Exception { SearchResponse response = client().prepareSearch("idx", "empty_bucket_idx") - .setTypes("type") .addAggregation( terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) .executionHint(randomExecutionHint()) @@ -1292,8 +1268,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", "foo"), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", "bar") + client().prepareIndex("cache_test_idx").setId("1").setSource("s", "foo"), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", "bar") ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index 9be3ed91676c0..e01f966cadd9b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -160,7 +160,7 @@ public void setupSuiteScopeCluster() throws Exception { precisionThreshold = randomIntBetween(0, 1 << randomInt(20)); IndexRequestBuilder[] builders = new IndexRequestBuilder[(int) numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") 
.setSource( jsonBuilder().startObject() .field("str_value", "s" + i) @@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception { IndexRequestBuilder[] dummyDocsBuilder = new IndexRequestBuilder[10]; for (int i = 0; i < dummyDocsBuilder.length; i++) { - dummyDocsBuilder[i] = client().prepareIndex("idx", "type").setSource("a_field", "1"); + dummyDocsBuilder[i] = client().prepareIndex("idx").setSource("a_field", "1"); } indexRandom(true, dummyDocsBuilder); @@ -204,7 +204,6 @@ private static String multiNumericField(boolean hash) { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) .get(); @@ -218,7 +217,6 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) .get(); @@ -232,7 +230,6 @@ public void testPartiallyUnmapped() throws Exception { public void testSingleValuedString() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) .get(); @@ -246,7 +243,6 @@ public void testSingleValuedString() throws Exception { public void testSingleValuedNumeric() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) .get(); @@ -289,7 +285,6 @@ public void testSingleValuedNumericGetProperty() throws Exception { public void testSingleValuedNumericHashed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") 
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) .get(); @@ -303,7 +298,6 @@ public void testSingleValuedNumericHashed() throws Exception { public void testMultiValuedString() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values")) .get(); @@ -317,7 +311,6 @@ public void testMultiValuedString() throws Exception { public void testMultiValuedNumeric() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false))) .get(); @@ -331,7 +324,6 @@ public void testMultiValuedNumeric() throws Exception { public void testMultiValuedNumericHashed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(true))) .get(); @@ -345,7 +337,6 @@ public void testMultiValuedNumericHashed() throws Exception { public void testSingleValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap())) @@ -362,7 +353,6 @@ public void testSingleValuedStringScript() throws Exception { public void testMultiValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values']", emptyMap())) @@ -380,7 +370,6 @@ public void testMultiValuedStringScript() throws Exception { 
public void testSingleValuedNumericScript() throws Exception { Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + singleNumericField() + '].value", emptyMap()); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .get(); @@ -400,7 +389,6 @@ public void testMultiValuedNumericScript() throws Exception { Collections.emptyMap() ); SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .get(); @@ -414,7 +402,6 @@ public void testMultiValuedNumericScript() throws Exception { public void testSingleValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) .field("str_value") @@ -432,7 +419,6 @@ public void testSingleValuedStringValueScript() throws Exception { public void testMultiValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) .field("str_values") @@ -450,7 +436,6 @@ public void testMultiValuedStringValueScript() throws Exception { public void testSingleValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) .field(singleNumericField()) @@ -468,7 +453,6 @@ public void testSingleValuedNumericValueScript() throws Exception { public void testMultiValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( cardinality("cardinality").precisionThreshold(precisionThreshold) 
.field(multiNumericField(false)) @@ -486,7 +470,6 @@ public void testMultiValuedNumericValueScript() throws Exception { public void testAsSubAgg() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( terms("terms").field("str_value") .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -517,8 +500,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index efebe1b0747a2..e8d425596beb0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -60,7 +60,7 @@ public void testRequestBreaker() throws Exception { true, IntStream.range(0, randomIntBetween(10, 1000)) .mapToObj( - i -> client().prepareIndex("test", "_doc") + i -> client().prepareIndex("test") .setId("id_" + i) .setSource(org.opensearch.common.collect.Map.of("field0", randomAlphaOfLength(5), "field1", randomAlphaOfLength(5))) ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java index fec81ec3a64a2..9549aad5399b5 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -875,8 +875,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index f8a7e36455b06..7aa602fff2ee8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -591,8 +591,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index c75e7e442f3e1..68f8cf6da575a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -560,8 +560,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index 7bb9492cb4ae7..79f1809fc2f3a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -117,7 +117,8 @@ public void setupSuiteScopeCluster() throws Exception { multiValueSample[i * 2] = firstMultiValueDatapoint; multiValueSample[(i * 2) + 1] = secondMultiValueDatapoint; - IndexRequestBuilder builder = client().prepareIndex("idx", "_doc", String.valueOf(i)) + IndexRequestBuilder builder = client().prepareIndex("idx") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("value", singleValueDatapoint) @@ -141,7 +142,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", String.valueOf(i)) + client().prepareIndex("empty_bucket_idx") + .setId(String.valueOf(i)) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -521,8 +523,8 @@ public void testScriptCaching() throws Exception { indexRandom( true, - client().prepareIndex("cache_test_idx", "type", 
"1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java index 5c9a64965a172..beacf7aa1ccec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -293,7 +293,8 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { builders.add( - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject().field("value", randomAlphaOfLengthBetween(5, 15)).field("l_value", i).endObject() ) @@ -313,7 +314,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } @@ -1187,8 +1189,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java index 10005be669a34..27fc26a114cc4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java @@ -264,8 +264,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java index 45322b53109e9..e9b8c91090695 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java @@ -91,9 +91,9 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("new_index").addMapping("_doc", "transit_mode", "type=keyword", "route_length_miles", "type=double").get(); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "train", "distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "bus", "distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("transit_mode", "train", "route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("transit_mode", "train", "distance", 42.0)); + 
builders.add(client().prepareIndex("old_index").setSource("transit_mode", "bus", "distance", 50.5)); + builders.add(client().prepareIndex("new_index").setSource("transit_mode", "train", "route_length_miles", 100.2)); indexRandom(true, builders); ensureSearchable(); @@ -242,8 +242,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 68acc61befb54..8b28261f7f00b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -503,8 +503,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index cc3ea5062499c..2da6ac3f9e586 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -475,8 +475,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index 17a5639070aa9..b6fdcf4b6267d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -182,7 +182,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 50; i++) { builders.add( - client().prepareIndex("idx", "type", Integer.toString(i)) + client().prepareIndex("idx") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field(TERMS_AGGS_FIELD, "val" + (i / 10)) @@ -196,39 +197,48 @@ public void setupSuiteScopeCluster() throws Exception { } builders.add( - client().prepareIndex("field-collapsing", "type", "1") + client().prepareIndex("field-collapsing") + .setId("1") .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z b").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "2") + client().prepareIndex("field-collapsing") + .setId("2") .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z n 
rare").field("value", 1).endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "3") + client().prepareIndex("field-collapsing") + .setId("3") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y z term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "4") + client().prepareIndex("field-collapsing") + .setId("4") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "5") + client().prepareIndex("field-collapsing") + .setId("5") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "6") + client().prepareIndex("field-collapsing") + .setId("6") .setSource(jsonBuilder().startObject().field("group", "b").field("text", "term rare").field("value", 3).endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "7") + client().prepareIndex("field-collapsing") + .setId("7") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y z term").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "8") + client().prepareIndex("field-collapsing") + .setId("8") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y term b").endObject()) ); builders.add( - client().prepareIndex("field-collapsing", "type", "9") + client().prepareIndex("field-collapsing") + .setId("9") .setSource(jsonBuilder().startObject().field("group", "c").field("text", "rare x term").field("value", 2).endObject()) ); @@ -243,11 +253,12 @@ public void setupSuiteScopeCluster() throws Exception { } builder.endArray().endObject(); - builders.add(client().prepareIndex("articles", "article").setSource(builder)); + builders.add(client().prepareIndex("articles").setSource(builder)); } builders.add( - 
client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "title 1") @@ -290,7 +301,8 @@ public void setupSuiteScopeCluster() throws Exception { ) ); builders.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "title 2") @@ -1123,7 +1135,6 @@ public void testNoStoredFields() throws Exception { for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); assertThat(hit.getId(), nullValue()); - assertThat(hit.getType(), equalTo("type")); } } } @@ -1143,8 +1154,8 @@ public void testScriptCaching() throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java index b96efbd335caf..6d3fe1ed3f190 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java @@ -73,7 +73,8 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); for (int i = 0; i < 10; i++) { - client().prepareIndex("idx", "type", "" + i) + client().prepareIndex("idx") + .setId("" + i) .setSource( jsonBuilder().startObject().field("value", i + 1).startArray("values").value(i + 2).value(i + 3).endArray().endObject() ) @@ -243,8 +244,8 @@ public void testScriptCaching() 
throws Exception { ); indexRandom( true, - client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), - client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2) + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) ); // Make sure we are starting with a clear cache diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java index f7994cef4788a..590587185b80e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java index 3a94d04ef81fb..5de4e5162247d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java @@ -155,7 +155,7 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int docs = 0; docs < numDocs; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); + builders.add(client().prepareIndex("idx").setSource(newDocBuilder())); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java index 56fe309d5f984..7674679378758 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -162,12 +162,12 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int docs = 0; docs < numDocs; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); + builders.add(client().prepareIndex("idx").setSource(newDocBuilder())); } - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 1, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(1, 2, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 1, 0, 0))); - builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 3, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(1, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(1, 2, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(3, 1, 0, 0))); + builders.add(client().prepareIndex("idx_with_gaps").setSource(newDocBuilder(3, 
3, 0, 0))); indexRandom(true, builders); ensureSearchable(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java index 4c3b956512a3f..d05740a5a0f36 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java @@ -84,7 +84,6 @@ public void setupSuiteScopeCluster() throws Exception { client().admin() .indices() .preparePutMapping(INDEX) - .setType("doc") .setSource("time", "type=date", "foo", "type=keyword", "value_1", "type=float", "value_2", "type=float") .get(); @@ -102,16 +101,16 @@ public void setupSuiteScopeCluster() throws Exception { int termCount = randomIntBetween(3, 6); for (int i = 0; i < termCount; ++i) { builders.add( - client().prepareIndex(INDEX, "doc").setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble())) + client().prepareIndex(INDEX).setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble())) ); } } time += TimeValue.timeValueHours(1).millis(); } - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(1, "foo", 1.0, 42.0))); - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(2, "foo", null, 42.0))); - builders.add(client().prepareIndex(INDEX_WITH_GAPS, "doc").setSource(newDocBuilder(3, "foo", 3.0, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(1, "foo", 1.0, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(2, "foo", null, 42.0))); + builders.add(client().prepareIndex(INDEX_WITH_GAPS).setSource(newDocBuilder(3, "foo", 3.0, 42.0))); indexRandom(true, builders); ensureSearchable(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java index 0f54e8acae427..d7f16b25a46e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -81,12 +81,11 @@ private ZonedDateTime date(int month, int day) { } private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx, "type") - .setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); + return client().prepareIndex(idx).setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx", "type") + return client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("value", value) @@ -108,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java index 449807fbe096e..cff655e040124 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java @@ -132,7 +132,7 @@ 
public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numValueBuckets; i++) { for (int docs = 0; docs < valueCounts[i]; docs++) { - builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder(i * interval))); + builders.add(client().prepareIndex("idx").setSource(newDocBuilder(i * interval))); } } @@ -143,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < valueCounts_empty.length; i++) { for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - builders.add(client().prepareIndex("empty_bucket_idx", "type").setSource(newDocBuilder(i))); + builders.add(client().prepareIndex("empty_bucket_idx").setSource(newDocBuilder(i))); numDocsEmptyIdx++; } } @@ -160,7 +160,7 @@ public void setupSuiteScopeCluster() throws Exception { // make approximately half of the buckets empty if (randomBoolean()) valueCounts_empty_rnd[i] = 0L; for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { - builders.add(client().prepareIndex("empty_bucket_idx_rnd", "type").setSource(newDocBuilder(i))); + builders.add(client().prepareIndex("empty_bucket_idx_rnd").setSource(newDocBuilder(i))); numDocsEmptyIdx_rnd++; } if (i > 0) { @@ -664,7 +664,7 @@ public void testAvgMovavgDerivNPE() throws Exception { } XContentBuilder doc = jsonBuilder().startObject().field("tick", i).field("value", value).endObject(); - client().prepareIndex("movavg_npe", "type").setSource(doc).get(); + client().prepareIndex("movavg_npe").setSource(doc).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 195b1a5c8bd33..4400181eb2226 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -91,7 +91,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -107,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { // creates 6 documents where the value of the field is 0, 1, 2, 3, // 3, 5 builders.add( - client().prepareIndex("idx_gappy", "type", "" + i) + client().prepareIndex("idx_gappy") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i == 4 ? 3 : i).endObject()) ); } @@ -115,7 +116,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java index eb26e8b38f0ec..3d9ebb469cba6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java @@ -102,7 +102,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = 
randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -117,7 +117,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } @@ -565,7 +566,7 @@ public void testFieldIsntWrittenOutTwice() throws Exception { .field("@timestamp", "2018-07-08T08:07:00.599Z") .endObject(); - client().prepareIndex("foo_2", "doc").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("foo_2").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); client().admin().indices().prepareRefresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java index 3d12a36224366..33cc350f10ff1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { 
assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java index e3df25fe0e2e6..dc37b49e7a910 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java @@ -175,7 +175,7 @@ public void setupSuiteScopeCluster() throws Exception { for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, mockBucket.key).field(VALUE_FIELD, value).endObject()) ); } @@ -183,14 +183,14 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = -10; i < 10; i++) { builders.add( - client().prepareIndex("neg_idx", "type") + client().prepareIndex("neg_idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject()) ); } for (int i = 0; i < 12; i++) { builders.add( - client().prepareIndex("double_predict", "type") + client().prepareIndex("double_predict") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject()) ); } @@ -1288,7 +1288,7 @@ public void testPredictWithNonEmptyBuckets() throws Exception { for (int i = 0; i < 10; i++) { bulkBuilder.add( - client().prepareIndex("predict_non_empty", "type") + client().prepareIndex("predict_non_empty") 
.setSource( jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).field(VALUE_FIELD2, 10).endObject() ) @@ -1297,7 +1297,7 @@ public void testPredictWithNonEmptyBuckets() throws Exception { for (int i = 10; i < 20; i++) { // Extra so there is a bucket that only has second field bulkBuilder.add( - client().prepareIndex("predict_non_empty", "type") + client().prepareIndex("predict_non_empty") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD2, 10).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java index 48e79b3696ecb..6728c9f888aeb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -92,7 +92,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -107,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java index b7a11bbe4aba7..f5a5d025946ec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java @@ -168,7 +168,7 @@ public void setupSuiteScopeCluster() throws Exception { for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, mockBucket.key).field(VALUE_FIELD, value).endObject()) ); } @@ -241,7 +241,6 @@ private void setupExpected(MetricTarget target) { public void testBasicDiff() { SearchResponse response = client().prepareSearch("idx") - .setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD) .interval(interval) @@ -286,7 +285,6 @@ public void testBasicDiff() { public void testInvalidLagSize() { try { client().prepareSearch("idx") - .setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD) .interval(interval) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java index c06af8cbb2504..90b0aba10e40a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() 
.field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java index a7aab44c5cdae..873c43d8b0f4c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java @@ -88,7 +88,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, fieldValue) @@ -103,7 +103,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", "" + i) + client().prepareIndex("empty_bucket_idx") + .setId("" + i) .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java index fba41f6c04e08..3c2aa6642633e 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java @@ -128,7 +128,7 @@ private void buildRedIndex(int numShards) throws Exception { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i).setSource("field1", "value1").get(); + client().prepareIndex("test").setId("" + i).setSource("field1", "value1").get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java index 5fac8b143516a..1d8512e101f78 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java @@ -78,7 +78,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) if (createIndex) { createIndex("test"); } - client().prepareIndex("test", "type1", id).setSource("field", "test").get(); + client().prepareIndex("test").setId(id).setSource("field", "test").get(); RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").get(); // at least one shard should be successful when refreshing assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java index 291f4f9ac24cb..fedb6b18d93fb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java @@ -72,7 +72,8 @@ private void testSearchAndRelocateConcurrently(final int 
numberOfReplicas) throw final int numDocs = between(10, 20); for (int i = 0; i < numDocs; i++) { indexBuilders.add( - client().prepareIndex("test", "type", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("test", "value") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index 8a6459d4083bb..9efb07fc7e581 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings.Builder; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -85,14 +84,12 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double lowLevelRate; final double topLevelRate; @@ -121,14 +118,15 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d); logger.info("creating index: [test] using settings: [{}]", settings.build()); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(mapping)); 
ensureSearchable(); final int numDocs = between(10, 100); int numCreated = 0; boolean[] added = new boolean[numDocs]; for (int i = 0; i < numDocs; i++) { try { - IndexResponse indexResponse = client().prepareIndex("test", "type", "" + i) + IndexResponse indexResponse = client().prepareIndex("test") + .setId("" + i) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index cec152ec8829e..094ab8a19c88b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -46,7 +46,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.search.sort.SortOrder; @@ -73,14 +72,12 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double exceptionRate; final double exceptionOnOpenRate; @@ -108,11 +105,11 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc if (createIndexWithoutErrors) { Settings.Builder settings = Settings.builder().put("index.number_of_replicas", numberOfReplicas()); logger.info("creating index: [test] using settings: [{}]", settings.build()); - 
client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); numInitialDocs = between(10, 100); ensureGreen(); for (int i = 0; i < numInitialDocs; i++) { - client().prepareIndex("test", "type", "init" + i).setSource("test", "init").get(); + client().prepareIndex("test").setId("init" + i).setSource("test", "init").get(); } client().admin().indices().prepareRefresh("test").execute().get(); client().admin().indices().prepareFlush("test").execute().get(); @@ -134,7 +131,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc // we cannot expect that the index will be valid .put(MockFSDirectoryFactory.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); logger.info("creating index: [test] using settings: [{}]", settings.build()); - client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); } ClusterHealthResponse clusterHealthResponse = client().admin() .cluster() @@ -160,7 +157,8 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc for (int i = 0; i < numDocs; i++) { added[i] = false; try { - IndexResponse indexResponse = client().prepareIndex("test", "type", Integer.toString(i)) + IndexResponse indexResponse = client().prepareIndex("test") + .setId(Integer.toString(i)) .setTimeout(TimeValue.timeValueSeconds(1)) .setSource("test", English.intToEnglish(i)) .get(); @@ -192,7 +190,6 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc int expectedResults = added[docToQuery] ? 
1 : 0; logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery)); SearchResponse searchResponse = client().prepareSearch() - .setTypes("type") .setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))) .setSize(expectedResults) .get(); @@ -202,7 +199,6 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc } // check match all searchResponse = client().prepareSearch() - .setTypes("type") .setQuery(QueryBuilders.matchAllQuery()) .setSize(numCreated + numInitialDocs) .addSort("_uid", SortOrder.ASC) @@ -239,10 +235,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc ); client().admin().indices().prepareOpen("test").execute().get(); ensureGreen(); - SearchResponse searchResponse = client().prepareSearch() - .setTypes("type") - .setQuery(QueryBuilders.matchQuery("test", "init")) - .get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, numInitialDocs); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java index b44e4be011475..7982d9f5781fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java @@ -136,7 +136,7 @@ public void testFailedSearchWithWrongQuery() throws Exception { } private void index(Client client, String id, String nameValue, int age) throws IOException { - client.index(Requests.indexRequest("test").type("type").id(id).source(source(id, nameValue, age))).actionGet(); + client.index(Requests.indexRequest("test").id(id).source(source(id, nameValue, age))).actionGet(); } private XContentBuilder source(String id, String nameValue, 
int age) throws IOException { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java index 23ca51b830fe1..420121006a943 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java @@ -109,7 +109,7 @@ private Set prepareData(int numShards) throws Exception { } private void index(String id, String nameValue, int age) throws IOException { - client().index(Requests.indexRequest("test").type("type").id(id).source(source(id, nameValue, age))).actionGet(); + client().index(Requests.indexRequest("test").id(id).source(source(id, nameValue, age))).actionGet(); } private XContentBuilder source(String id, String nameValue, int age) throws IOException { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java index 27eae206ae19a..3258ced753211 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/ccs/CrossClusterSearchIT.java @@ -71,7 +71,7 @@ protected boolean reuseClusters() { private int indexDocs(Client client, String index) { int numDocs = between(1, 10); for (int i = 0; i < numDocs; i++) { - client.prepareIndex(index, "_doc").setSource("f", "v").get(); + client.prepareIndex(index).setSource("f", "v").get(); } client.admin().indices().prepareRefresh(index).get(); return numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java index 082a8df529a0b..68bac89213c57 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java @@ -94,9 +94,8 @@ public void testPlugin() throws Exception { ) .get(); - client().index( - indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("test", "I am sam i am").endObject()) - ).actionGet(); + client().index(indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "I am sam i am").endObject())) + .actionGet(); client().admin().indices().prepareRefresh().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java index 1635608b0b774..b16678d60fce7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java @@ -125,7 +125,8 @@ public void testSimpleNested() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -144,7 +145,8 @@ public void testSimpleNested() throws Exception { ) ); requests.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "big gray elephant") @@ -261,7 +263,7 @@ public void testRandomNested() throws Exception { source.startObject().field("x", "y").endObject(); } source.endArray().endObject(); - requestBuilders.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource(source)); + requestBuilders.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource(source)); } indexRandom(true, requestBuilders); @@ 
-343,7 +345,8 @@ public void testNestedMultipleLayers() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -369,7 +372,8 @@ public void testNestedMultipleLayers() throws Exception { ) ); requests.add( - client().prepareIndex("articles", "article", "2") + client().prepareIndex("articles") + .setId("2") .setSource( jsonBuilder().startObject() .field("title", "big gray elephant") @@ -544,7 +548,8 @@ public void testNestedDefinedAsObject() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -597,7 +602,8 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { List requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -700,7 +706,8 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { // index the message in an object form instead of an array requests = new ArrayList<>(); requests.add( - client().prepareIndex("articles", "article", "1") + client().prepareIndex("articles") + .setId("1") .setSource( jsonBuilder().startObject() .field("title", "quick brown fox") @@ -756,7 +763,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { List requests = new ArrayList<>(); int numDocs = randomIntBetween(2, 35); requests.add( - client().prepareIndex("test", "type1", "0") + client().prepareIndex("test") + .setId("0") .setSource( jsonBuilder().startObject() .field("field1", 0) @@ -774,7 +782,8 @@ public void testMatchesQueriesNestedInnerHits() throws 
Exception { ) ); requests.add( - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -794,7 +803,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { for (int i = 2; i < numDocs; i++) { requests.add( - client().prepareIndex("test", "type1", String.valueOf(i)) + client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("field1", i) @@ -852,7 +862,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { public void testNestedSource() throws Exception { assertAcked(prepareCreate("index1").addMapping("message", "comments", "type=nested")); - client().prepareIndex("index1", "message", "1") + client().prepareIndex("index1") + .setId("1") .setSource( jsonBuilder().startObject() .field("message", "quick brown fox") @@ -947,8 +958,8 @@ public void testNestedSource() throws Exception { public void testInnerHitsWithIgnoreUnmapped() throws Exception { assertAcked(prepareCreate("index1").addMapping("_doc", "nested_type", "type=nested")); createIndex("index2"); - client().prepareIndex("index1", "_doc", "1").setSource("nested_type", Collections.singletonMap("key", "value")).get(); - client().prepareIndex("index2", "type", "3").setSource("key", "value").get(); + client().prepareIndex("index1").setId("1").setSource("nested_type", Collections.singletonMap("key", "value")).get(); + client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); SearchResponse response = client().prepareSearch("index1", "index2") @@ -971,7 +982,8 @@ public void testUseMaxDocInsteadOfSize() throws Exception { .prepareUpdateSettings("index2") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)) .get(); - client().prepareIndex("index2", "type", "1") + client().prepareIndex("index2") + .setId("1") .setSource( 
jsonBuilder().startObject().startArray("nested").startObject().field("field", "value1").endObject().endArray().endObject() ) @@ -988,7 +1000,8 @@ public void testUseMaxDocInsteadOfSize() throws Exception { public void testTooHighResultWindow() throws Exception { assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested")); - client().prepareIndex("index2", "type", "1") + client().prepareIndex("index2") + .setId("1") .setSource( jsonBuilder().startObject().startArray("nested").startObject().field("field", "value1").endObject().endArray().endObject() ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java index db3ec0f1232a4..488c253535827 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java @@ -61,9 +61,9 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test1", "number", 1).get(); - client().prepareIndex("test", "type1", "2").setSource("name", "test2", "number", 2).get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test3", "number", 3).get(); + client().prepareIndex("test").setId("1").setSource("name", "test1", "number", 1).get(); + client().prepareIndex("test").setId("2").setSource("name", "test2", "number", 2).get(); + client().prepareIndex("test").setId("3").setSource("name", "test3", "number", 3).get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -111,9 +111,9 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test", "title", "title1").get(); - 
client().prepareIndex("test", "type1", "2").setSource("name", "test").get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test").get(); + client().prepareIndex("test").setId("1").setSource("name", "test", "title", "title1").get(); + client().prepareIndex("test").setId("2").setSource("name", "test").get(); + client().prepareIndex("test").setId("3").setSource("name", "test").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -162,9 +162,9 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("name", "test", "title", "title1").get(); - client().prepareIndex("test", "type1", "2").setSource("name", "test", "title", "title2").get(); - client().prepareIndex("test", "type1", "3").setSource("name", "test", "title", "title3").get(); + client().prepareIndex("test").setId("1").setSource("name", "test", "title", "title1").get(); + client().prepareIndex("test").setId("2").setSource("name", "test", "title", "title2").get(); + client().prepareIndex("test").setId("3").setSource("name", "test", "title", "title3").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -202,7 +202,7 @@ public void testRegExpQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -224,7 +224,7 @@ public void testPrefixQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -246,7 +246,7 @@ public void testFuzzyQuerySupportsName() { 
createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -268,7 +268,7 @@ public void testWildcardQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -290,7 +290,7 @@ public void testSpanFirstQuerySupportsName() { createIndex("test1"); ensureGreen(); - client().prepareIndex("test1", "type1", "1").setSource("title", "title1 title2").get(); + client().prepareIndex("test1").setId("1").setSource("title", "title1 title2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -315,8 +315,8 @@ public void testMatchedWithShould() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("content", "Lorem ipsum dolor sit amet").get(); - client().prepareIndex("test", "type1", "2").setSource("content", "consectetur adipisicing elit").get(); + client().prepareIndex("test").setId("1").setSource("content", "Lorem ipsum dolor sit amet").get(); + client().prepareIndex("test").setId("2").setSource("content", "consectetur adipisicing elit").get(); refresh(); // Execute search at least two times to load it in cache @@ -349,7 +349,7 @@ public void testMatchedWithWrapperQuery() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("content", "Lorem ipsum dolor sit amet").get(); + client().prepareIndex("test").setId("1").setSource("content", "Lorem ipsum dolor sit amet").get(); refresh(); MatchQueryBuilder matchQueryBuilder = matchQuery("content", "amet").queryName("abc"); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java index de7000de6f208..7df5b9b88a69c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java @@ -63,15 +63,13 @@ protected Collection> nodePlugins() { protected void setup() throws Exception { indexRandom( true, - client().prepareIndex("test", "test", "1") - .setSource("name", "arbitrary content", "other_name", "foo", "other_other_name", "bar"), - client().prepareIndex("test", "test", "2").setSource("other_name", "foo", "other_other_name", "bar") + client().prepareIndex("test").setId("1").setSource("name", "arbitrary content", "other_name", "foo", "other_other_name", "bar"), + client().prepareIndex("test").setId("2").setSource("other_name", "foo", "other_other_name", "bar") ); } public void testThatCustomHighlightersAreSupported() throws IOException { SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("test") .setQuery(QueryBuilders.matchAllQuery()) .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom")) .get(); @@ -86,7 +84,6 @@ public void testThatCustomHighlighterCanBeConfiguredPerField() throws Exception highlightConfig.options(options); SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("test") .setQuery(QueryBuilders.matchAllQuery()) .highlighter(new HighlightBuilder().field(highlightConfig)) .get(); @@ -100,7 +97,6 @@ public void testThatCustomHighlighterCanBeConfiguredGlobally() throws Exception options.put("myGlobalOption", "someValue"); SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("test") .setQuery(QueryBuilders.matchAllQuery()) 
.highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options)) .get(); @@ -111,7 +107,6 @@ public void testThatCustomHighlighterCanBeConfiguredGlobally() throws Exception public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("test") .setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary"))) .highlighter( new HighlightBuilder().highlighterType("test-custom") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 1918b43fb780c..f0fe5e4479b76 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.analysis.AbstractIndexAnalyzerProvider; import org.opensearch.index.analysis.AnalyzerProvider; import org.opensearch.index.analysis.PreConfiguredTokenFilter; @@ -152,11 +151,11 @@ public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOExceptio .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().array("tags", "foo bar", "foo bar", "foo bar", "foo baz").field("sort", 1).endObject()) .get(); - client().prepareIndex("test", "_doc") + client().prepareIndex("test") .setId("2") 
.setSource(jsonBuilder().startObject().array("tags", "foo baz", "foo baz", "foo baz", "foo bar").field("sort", 2).endObject()) .get(); @@ -187,7 +186,7 @@ public void testHighlightingWithStoredKeyword() throws IOException { .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); refresh(); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("text", "foo")) @@ -212,7 +211,7 @@ public void testHighlightingWithWildcardName() throws IOException { .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); refresh(); for (String type : ALL_TYPES) { SearchResponse search = client().prepareSearch() @@ -241,7 +240,7 @@ public void testFieldAlias() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("text", "foo").get(); + client().prepareIndex("test").setId("1").setSource("text", "foo").get(); refresh(); for (String type : ALL_TYPES) { @@ -271,7 +270,7 @@ public void testFieldAliasWithSourceLookup() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("text", "foo bar").get(); + client().prepareIndex("test").setId("1").setSource("text", "foo bar").get(); refresh(); for (String type : ALL_TYPES) { @@ -298,7 +297,7 @@ public void testFieldAliasWithWildcardField() throws IOException { 
.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1").setSource("keyword", "foo").get(); + client().prepareIndex("test").setId("1").setSource("keyword", "foo").get(); refresh(); HighlightBuilder builder = new HighlightBuilder().field(new Field("al*")).requireFieldMatch(false); @@ -330,7 +329,8 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOExc .endObject(); mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) .get(); refresh(); @@ -358,7 +358,7 @@ public void testHighTermFrequencyDoc() throws IOException { for (int i = 0; i < 6000; i++) { builder.append("abc").append(" "); } - client().prepareIndex("test", "test", "1").setSource("name", builder.toString()).get(); + client().prepareIndex("test").setId("1").setSource("name", builder.toString()).get(); refresh(); SearchResponse search = client().prepareSearch() .setQuery(constantScoreQuery(matchQuery("name", "abc"))) @@ -378,7 +378,8 @@ public void testEnsureNoNegativeOffsets() throws Exception { ) ); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "no_long_term", "This is a test where foo is highlighed and should be highlighted", @@ -437,7 +438,8 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -506,7 +508,8 @@ public void 
testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -575,7 +578,8 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( XContentFactory.jsonBuilder() .startObject() @@ -654,12 +658,11 @@ public void testHighlightIssue1994() throws Exception { ); String[] titles = new String[] { "This is a test on the highlighting bug present in opensearch", "The bug is bugging us" }; - indexRandom(false, client().prepareIndex("test", "type1", "1").setSource("title", titles, "titleTV", titles)); + indexRandom(false, client().prepareIndex("test").setId("1").setSource("title", titles, "titleTV", titles)); indexRandom( true, - client().prepareIndex("test", "type1", "2") - .setSource("titleTV", new String[] { "some text to highlight", "highlight other text" }) + client().prepareIndex("test").setId("2").setSource("titleTV", new String[] { "some text to highlight", "highlight other text" }) ); SearchResponse search = client().prepareSearch() @@ -685,7 +688,7 @@ public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field1", new String[] { "this is a test", "this is the second test" }, @@ -734,7 +737,7 @@ public void 
testHighlightingOnWildcardFields() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field-postings", "This is the first test sentence. Here is the second one.", @@ -793,7 +796,7 @@ public void testForceSourceWithSourceDisabled() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "The quick brown fox jumps over the lazy dog", "field2", "second field content") .get(); refresh(); @@ -835,9 +838,7 @@ public void testForceSourceWithSourceDisabled() throws Exception { public void testPlainHighlighter() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -856,8 +857,7 @@ public void testFastVectorHighlighter() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") ); logger.info("--> highlighting and searching on field1"); @@ -891,7 +891,7 @@ public void testHighlighterWithSentenceBoundaryScanner() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1").setSource("field1", "A sentence with few words. Another sentence with even more words.") + client().prepareIndex("test").setSource("field1", "A sentence with few words. 
Another sentence with even more words.") ); for (String type : new String[] { "unified", "fvh" }) { @@ -932,7 +932,7 @@ public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Excepti indexRandom( true, - client().prepareIndex("test", "type1").setSource("field1", "A sentence with few words. Another sentence with even more words.") + client().prepareIndex("test").setSource("field1", "A sentence with few words. Another sentence with even more words.") ); for (String type : new String[] { "fvh", "unified" }) { @@ -973,10 +973,7 @@ public void testHighlighterWithWordBoundaryScanner() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - indexRandom( - true, - client().prepareIndex("test", "type1").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog") - ); + indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); logger.info("--> highlighting and searching on 'field' with word boundary_scanner"); for (String type : new String[] { "unified", "fvh" }) { @@ -1006,10 +1003,7 @@ public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - indexRandom( - true, - client().prepareIndex("test", "type1").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog") - ); + indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); for (String type : new String[] { "unified", "fvh" }) { SearchSourceBuilder source = searchSource().query(termQuery("field1", "some")) @@ -1046,7 +1040,7 @@ public void testFVHManyMatches() throws Exception { // Index one megabyte of "t " over and over and over again String pattern = "t "; String value = new String(new char[1024 * 256 / pattern.length()]).replace("\0", 
pattern); - client().prepareIndex("test", "type1").setSource("field1", value).get(); + client().prepareIndex("test").setSource("field1", value).get(); refresh(); logger.info("--> highlighting and searching on field1 with default phrase limit"); @@ -1282,7 +1276,7 @@ public void testFastVectorHighlighterManyDocs() throws Exception { int COUNT = between(20, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[COUNT]; for (int i = 0; i < COUNT; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "test " + i); + indexRequestBuilders[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource("field1", "test " + i); } logger.info("--> indexing docs"); indexRandom(true, indexRequestBuilders); @@ -1323,7 +1317,8 @@ public void testSameContent() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1350,7 +1345,8 @@ public void testFastVectorHighlighterOffsetParameter() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1371,7 +1367,8 @@ public void testEscapeHtml() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] 
= client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1391,7 +1388,8 @@ public void testEscapeHtmlVector() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -1433,7 +1431,7 @@ public void testMultiMapperVectorWithStore() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1481,7 +1479,7 @@ public void testMultiMapperVectorFromSource() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1529,7 +1527,7 @@ public void testMultiMapperNoVectorWithStore() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1576,7 +1574,7 @@ public void testMultiMapperNoVectorFromSource() throws Exception { ) ); ensureGreen(); - 
client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -1602,7 +1600,8 @@ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exceptio IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the enabling fast vector highlighter"); } indexRandom(true, indexRequestBuilders); @@ -1640,7 +1639,8 @@ public void testDisableFastVectorHighlighter() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the workaround for the fast vector highlighting SOLR-3724"); } indexRandom(true, indexRequestBuilders); @@ -1695,7 +1695,8 @@ public void testDisableFastVectorHighlighter() throws Exception { public void testFSHHighlightAllMvFragments() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "tags", "type=text,term_vector=with_positions_offsets")); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "tags", new String[] { @@ -1724,9 +1725,7 @@ public void testFSHHighlightAllMvFragments() throws Exception { public void testBoostingQuery() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy 
dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1742,9 +1741,7 @@ public void testBoostingQuery() { public void testBoostingQueryTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1761,9 +1758,7 @@ public void testCommonTermsQuery() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1778,9 +1773,7 @@ public void testCommonTermsTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) @@ -1794,7 +1787,8 @@ public void testCommonTermsTermVector() throws IOException { public void 
testPlainHighlightDifferentFragmenter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "tags", "type=text")); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .array( @@ -1905,7 +1899,7 @@ public void testFastVectorHighlighterMultipleFields() { public void testMissingStoredField() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "highlight_field", "type=text,store=true")); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "highlight").endObject()).get(); + client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("field", "highlight").endObject()).get(); refresh(); // This query used to fail when the field to highlight was absent @@ -1943,7 +1937,8 @@ public void testNumericHighlighting() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "test", "1") + client().prepareIndex("test") + .setId("1") .setSource("text", "opensearch test", "byte", 25, "short", 42, "int", 100, "long", -1, "float", 3.2f, "double", 42.42) .get(); refresh(); @@ -1967,7 +1962,7 @@ public void testResetTwice() throws Exception { ).addMapping("type", "text", "type=text,analyzer=my_analyzer") ); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("text", "opensearch test").get(); + client().prepareIndex("test").setId("1").setSource("text", "opensearch test").get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -2171,7 +2166,7 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { index("test", "type1", "2", "text", new String[] { "", text2 }); refresh(); - IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("2"); + IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery().addIds("2"); field.highlighterType("plain"); response = 
client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -2188,7 +2183,7 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { // But if the field was actually empty then you should get no highlighting field index("test", "type1", "3", "text", new String[] {}); refresh(); - idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("3"); + idsQueryBuilder = QueryBuilders.idsQuery().addIds("3"); field.highlighterType("plain"); response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -2205,7 +2200,7 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { index("test", "type1", "4"); refresh(); - idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("4"); + idsQueryBuilder = QueryBuilders.idsQuery().addIds("4"); field.highlighterType("plain"); response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -2286,7 +2281,7 @@ public void testPostingsHighlighter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy quick dog") .get(); refresh(); @@ -2379,7 +2374,8 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "field1", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. 
" @@ -2411,7 +2407,8 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception { ); assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("The quick brown dog jumps over the lazy fox.")); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( "field1", new String[] { @@ -2477,7 +2474,7 @@ public void testMultiMatchQueryHighlight() throws IOException { .endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping)); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "The quick brown fox jumps over", "field2", "The quick brown fox jumps over") .get(); refresh(); @@ -2513,7 +2510,7 @@ public void testPostingsHighlighterOrderByScore() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource( "field1", new String[] { @@ -2563,7 +2560,8 @@ public void testPostingsHighlighterEscapeHtml() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? opensearch"); } indexRandom(true, indexRequestBuilders); @@ -2612,7 +2610,7 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test . Second sentence.").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test . 
Second sentence.").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -2673,7 +2671,7 @@ public void testPostingsHighlighterMultiMapperFromSource() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); + client().prepareIndex("test").setId("1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query @@ -2714,7 +2712,8 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("title", "This is a test for the postings highlighter"); } indexRandom(true, indexRequestBuilders); @@ -2729,7 +2728,7 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { public void testPostingsHighlighterBoostingQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2747,7 +2746,7 @@ public void testPostingsHighlighterCommonTermsQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.") .get(); refresh(); @@ -2790,7 +2789,7 @@ public void testPostingsHighlighterPrefixQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2812,7 +2811,7 @@ public void testPostingsHighlighterFuzzyQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2835,7 +2834,7 @@ public void testPostingsHighlighterRegexpQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2858,7 +2857,7 @@ public void testPostingsHighlighterWildcardQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.") .get(); refresh(); @@ -2894,7 +2893,7 @@ public void testPostingsHighlighterTermRangeQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "aaab").get(); + client().prepareIndex("test").setSource("field1", "this is a test", "field2", "aaab").get(); refresh(); logger.info("--> highlighting and searching on field2"); @@ -2909,7 +2908,7 @@ public void testPostingsHighlighterQueryString() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1") + client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); @@ -2932,7 +2931,7 @@ public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2946,7 +2945,7 @@ public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Excepti assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2963,7 +2962,7 @@ public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Except 
assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2978,7 +2977,7 @@ public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Excep assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); + client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -3002,7 +3001,8 @@ public void testPostingsHighlighterManyDocs() throws Exception { // (https://github.com/elastic/elasticsearch/issues/4103) String prefix = randomAlphaOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource("field1", "Sentence " + prefix + " test. 
Sentence two."); } logger.info("--> indexing docs"); @@ -3038,11 +3038,10 @@ public void testDoesNotHighlightTypeName() throws Exception { assertAcked(prepareCreate("test").addMapping("typename", mapping)); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "typename").setSource("foo", "test typename")); + indexRandom(true, client().prepareIndex("test").setSource("foo", "test typename")); for (String highlighter : ALL_TYPES) { SearchResponse response = client().prepareSearch("test") - .setTypes("typename") .setQuery(matchQuery("foo", "test")) .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)) .get(); @@ -3067,11 +3066,10 @@ public void testDoesNotHighlightAliasFilters() throws Exception { assertAcked(client().admin().indices().prepareAliases().addAlias("test", "filtered_alias", matchQuery("foo", "japanese"))); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "typename").setSource("foo", "test japanese")); + indexRandom(true, client().prepareIndex("test").setSource("foo", "test japanese")); for (String highlighter : ALL_TYPES) { SearchResponse response = client().prepareSearch("filtered_alias") - .setTypes("typename") .setQuery(matchQuery("foo", "test")) .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)) .get(); @@ -3191,7 +3189,8 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "Arbitrary text field which will should not cause a failure").endObject()) .get(); refresh(); @@ -3231,7 +3230,8 @@ public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException assertAcked(prepareCreate("test").addMapping("jobs", mappings)); ensureYellow(); - 
client().prepareIndex("test", "jobs", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("jd", "some आवश्यकता है- आर्य समाज अनाथालय, 68 सिविल लाइन्स, बरेली को एक पुरूष" + " रस text") @@ -3269,7 +3269,8 @@ public void testKeywordFieldHighlighting() throws IOException { mappings.endObject(); assertAcked(prepareCreate("test").addMapping("type", mappings)); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject()) .get(); refresh(); @@ -3290,7 +3291,6 @@ public void testKeywordFieldHighlighting() throws IOException { public void testACopyFieldWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "nested") @@ -3308,11 +3308,11 @@ public void testACopyFieldWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("foo") @@ -3340,7 +3340,8 @@ public void testACopyFieldWithNestedQuery() throws Exception { } public void testFunctionScoreQueryHighlight() throws Exception { - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "brown").endObject()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3356,7 +3357,8 @@ public void testFunctionScoreQueryHighlight() throws Exception { } public void testFiltersFunctionScoreQueryHighlight() throws Exception { - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") 
.setSource(jsonBuilder().startObject().field("text", "brown").field("enable", "yes").endObject()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3393,9 +3395,9 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); indexRandom( true, - client().prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now), "field", "hello world"), - client().prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1)), "field", "hello"), - client().prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2)), "field", "world") + client().prepareIndex("index-1").setId("1").setSource("d", formatter.format(now), "field", "hello world"), + client().prepareIndex("index-1").setId("2").setSource("d", formatter.format(now.minusDays(1)), "field", "hello"), + client().prepareIndex("index-1").setId("3").setSource("d", formatter.format(now.minusDays(2)), "field", "world") ); ensureSearchable("index-1"); for (int i = 0; i < 5; i++) { @@ -3419,7 +3421,6 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { public void testWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "text") @@ -3436,11 +3437,11 @@ public void testWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("foo") @@ -3510,7 +3511,8 @@ public void testWithNormalizer() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "doc", "0") + client().prepareIndex("test") + 
.setId("0") .setSource("keyword", "Hello World") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -3531,7 +3533,8 @@ public void testDisableHighlightIdField() throws Exception { assertAcked(prepareCreate("test").addMapping("doc", "keyword", "type=keyword")); ensureGreen(); - client().prepareIndex("test", "doc", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") + client().prepareIndex("test") + .setId("d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") .setSource("keyword", "Hello World") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java index d9013a61e2e08..8767904e03c72 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -226,10 +226,10 @@ public void testWithIndexFilter() throws InterruptedException { assertAcked(prepareCreate("index-2").addMapping("_doc", "timestamp", "type=date", "field1", "type=long")); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("index-1", "_doc").setSource("timestamp", "2015-07-08")); - reqs.add(client().prepareIndex("index-1", "_doc").setSource("timestamp", "2018-07-08")); - reqs.add(client().prepareIndex("index-2", "_doc").setSource("timestamp", "2019-10-12")); - reqs.add(client().prepareIndex("index-2", "_doc").setSource("timestamp", "2020-07-08")); + reqs.add(client().prepareIndex("index-1").setSource("timestamp", "2015-07-08")); + reqs.add(client().prepareIndex("index-1").setSource("timestamp", "2018-07-08")); + reqs.add(client().prepareIndex("index-2").setSource("timestamp", "2019-10-12")); + reqs.add(client().prepareIndex("index-2").setSource("timestamp", "2020-07-08")); indexRandom(true, reqs); 
FieldCapabilitiesResponse response = client().prepareFieldCaps("index-*").setFields("*").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index dc57ddf80d28d..72c60e98ec328 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -48,6 +48,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.fielddata.ScriptDocValues; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.rest.RestStatus; @@ -187,7 +188,7 @@ public void testStoredFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -206,9 +207,10 @@ public void testStoredFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().field("field1", "value1").field("field2", "value2").field("field3", "value3").endObject() ) @@ -289,7 +291,7 @@ public void testScriptDocAndFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("num1") .field("type", "double") @@ -300,21 
+302,24 @@ public void testScriptDocAndFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject() ) .get(); client().admin().indices().prepareFlush().get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).field("date", "1970-01-01T00:00:25").endObject() ) .get(); client().admin().indices().prepareFlush().get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).field("date", "1970-01-01T00:02:00").endObject() ) @@ -388,7 +393,7 @@ public void testScriptFieldWithNanos() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("date") .field("type", "date_nanos") @@ -398,14 +403,15 @@ public void testScriptFieldWithNanos() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("doc").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); String date = "2019-01-31T10:00:00.123456789Z"; indexRandom( true, false, - client().prepareIndex("test", "doc", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("date", "1970-01-01T00:00:00.000Z").endObject()), - client().prepareIndex("test", "doc", 
"2").setSource(jsonBuilder().startObject().field("date", date).endObject()) + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("date", date).endObject()) ); SearchResponse response = client().prepareSearch() @@ -443,7 +449,8 @@ public void testIdBasedScriptFields() throws Exception { int numDocs = randomIntBetween(1, 30); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("num1", i).endObject()); } indexRandom(true, indexRequestBuilders); @@ -479,7 +486,7 @@ public void testIdBasedScriptFields() throws Exception { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); assertThat(fields, equalTo(singleton("type"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1")); + assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); } response = client().prepareSearch() @@ -497,7 +504,7 @@ public void testIdBasedScriptFields() throws Exception { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); assertThat(fields, equalTo(newHashSet("type", "id"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1")); + assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i))); } } @@ -505,7 +512,8 @@ public void testIdBasedScriptFields() throws Exception { 
public void testScriptFieldUsingSource() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -566,7 +574,7 @@ public void testScriptFieldUsingSource() throws Exception { } public void testScriptFieldsForNullReturn() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", "bar").setRefreshPolicy("true").get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy("true").get(); SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) @@ -585,7 +593,8 @@ public void testScriptFieldsForNullReturn() throws Exception { public void testPartialFields() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -617,7 +626,7 @@ public void testStoredFieldsWithoutSource() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -663,10 +672,11 @@ public void testStoredFieldsWithoutSource() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("byte_field", (byte) 1) @@ -731,17 +741,14 @@ public void testStoredFieldsWithoutSource() throws Exception { } public void testSearchFieldsMetadata() throws Exception { - client().prepareIndex("my-index", 
"my-type1", "1") + client().prepareIndex("my-index") + .setId("1") .setRouting("1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch("my-index") - .setTypes("my-type1") - .addStoredField("field1") - .addStoredField("_routing") - .get(); + SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField("field1").addStoredField("_routing").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue()); @@ -749,13 +756,14 @@ public void testSearchFieldsMetadata() throws Exception { } public void testSearchFieldsNonLeafField() throws Exception { - client().prepareIndex("my-index", "my-type1", "1") + client().prepareIndex("my-index") + .setId("1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .setRefreshPolicy(IMMEDIATE) .get(); assertFailures( - client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"), + client().prepareSearch("my-index").addStoredField("field1"), RestStatus.BAD_REQUEST, containsString("field [field1] isn't a leaf field") ); @@ -767,9 +775,9 @@ public void testGetFieldsComplexField() throws Exception { .prepareCreate("my-index") .setSettings(Settings.builder().put("index.refresh_interval", -1)) .addMapping( - "doc", + MapperService.SINGLE_MAPPING_NAME, jsonBuilder().startObject() - .startObject("doc") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "object") @@ -821,7 +829,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() ); - client().prepareIndex("my-index", "doc", "1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get(); + client().prepareIndex("my-index").setId("1").setRefreshPolicy(IMMEDIATE).setSource(source, 
XContentType.JSON).get(); String field = "field1.field2.field3.field4"; @@ -835,10 +843,9 @@ public void testGetFieldsComplexField() throws Exception { // see #8203 public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException { assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "test_field", "type=keyword").get()); - indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar")); + indexRandom(true, client().prepareIndex("test").setId("1").setSource("test_field", "foobar")); refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("type") .setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field")) .get(); assertHitCount(searchResponse, 1); @@ -852,7 +859,7 @@ public void testDocValueFields() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -900,10 +907,11 @@ public void testDocValueFields() throws Exception { .endObject() ); - client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("text_field", "foo") @@ -1129,7 +1137,8 @@ public void testScriptFields() throws Exception { List reqs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { reqs.add( - client().prepareIndex("index", "type", Integer.toString(i)) + client().prepareIndex("index") + .setId(Integer.toString(i)) .setSource( "s", Integer.toString(i), @@ -1172,7 +1181,7 @@ public void 
testScriptFields() throws Exception { public void testDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1196,13 +1205,13 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); - index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "text_field", "foo", "date_field", formatter.print(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch() @@ -1235,7 +1244,7 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1259,13 +1268,13 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); - index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + 
index("test", MapperService.SINGLE_MAPPING_NAME, "1", "text_field", "foo", "date_field", formatter.print(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch() @@ -1297,7 +1306,7 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { public void testStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1318,9 +1327,9 @@ public void testStoredFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); - index("test", "type", "1", "field1", "value1", "field2", "value2"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); SearchResponse searchResponse = client().prepareSearch() @@ -1341,7 +1350,7 @@ public void testStoredFieldsWithFieldAlias() throws Exception { public void testWildcardStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1362,9 +1371,9 @@ public void testWildcardStoredFieldsWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); - index("test", "type", "1", "field1", "value1", "field2", "value2"); + index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); SearchResponse searchResponse = 
client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); @@ -1387,7 +1396,8 @@ public void testLoadMetadata() throws Exception { indexRandom( true, - client().prepareIndex("test", "doc", "1") + client().prepareIndex("test") + .setId("1") .setRouting("1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java index 2c77e3d1d44e3..712026eaf5c43 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java @@ -51,6 +51,7 @@ import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.opensearch.search.MultiValueMode; +import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.VersionUtils; @@ -77,7 +78,9 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertOrderedSearchHits; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -113,7 +116,6 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -128,7 +130,6 @@ public 
void testDistanceScoreGeoLinGaussExp() throws Exception { ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource( @@ -146,7 +147,6 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { for (int i = 1; i <= numDummyDocs; i++) { indexBuilders.add( client().prepareIndex() - .setType("type1") .setId(Integer.toString(i + 3)) .setIndex("test") .setSource( @@ -244,14 +244,12 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 0.5).endObject()) ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.7).endObject()) @@ -262,7 +260,6 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { for (int i = 0; i < numDummyDocs; i++) { indexBuilders.add( client().prepareIndex() - .setType("type1") .setId(Integer.toString(i + 3)) .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 3.0 + i).endObject()) @@ -360,7 +357,6 @@ public void testBoostModeSettingWorks() throws Exception { List indexBuilders = new ArrayList<>(); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -375,7 +371,6 @@ public void testBoostModeSettingWorks() throws Exception { ); indexBuilders.add( client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource( @@ -459,7 +454,6 @@ public void testParseGeoPoint() throws Exception { ); client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -528,7 +522,6 @@ public void testCombineModes() throws Exception { ); client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") 
.setRefreshPolicy(IMMEDIATE) @@ -626,6 +619,76 @@ public void testCombineModes() throws Exception { } + public void testCombineModesExplain() throws Exception { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("test") + .field("type", "text") + .endObject() + .startObject("num") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + client().prepareIndex() + .setId("1") + .setIndex("test") + .setRefreshPolicy(IMMEDIATE) + .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()) + .get(); + + FunctionScoreQueryBuilder baseQuery = functionScoreQuery( + constantScoreQuery(termQuery("test", "value")).queryName("query1"), + ScoreFunctionBuilders.weightFactorFunction(2, "weight1") + ); + // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score + ActionFuture response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery, gaussDecayFunction("num", 0.0, 1.0, null, 0.5, "func2")).boostMode( + CombineFunction.MULTIPLY + ) + ) + ) + ); + SearchResponse sr = response.actionGet(); + SearchHits sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(0).getExplanation().getDetails(), arrayWithSize(2)); + assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails(), arrayWithSize(2)); + // "description": "ConstantScore(test:value) (_name: query1)" + assertThat( + sh.getAt(0).getExplanation().getDetails()[0].getDetails()[0].getDescription(), + equalTo("ConstantScore(test:value) (_name: query1)") + ); + assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails(), arrayWithSize(2)); + 
assertThat(sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(2)); + // "description": "constant score 1.0(_name: func1) - no function provided" + assertThat( + sh.getAt(0).getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDetails()[0].getDescription(), + equalTo("constant score 1.0(_name: weight1) - no function provided") + ); + // "description": "exp(-0.5*pow(MIN[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.7213475204444817, + // _name: func2)" + assertThat(sh.getAt(0).getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + assertThat(sh.getAt(0).getExplanation().getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(1)); + assertThat( + sh.getAt(0).getExplanation().getDetails()[1].getDetails()[0].getDetails()[0].getDescription(), + containsString("_name: func2") + ); + } + public void testExceptionThrownIfScaleLE0() throws Exception { assertAcked( prepareCreate("test").addMapping( @@ -645,14 +708,10 @@ public void testExceptionThrownIfScaleLE0() throws Exception { ) ); client().index( - indexRequest("test").type("type1") - .id("1") - .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject()) + indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") - .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-28").endObject()) + indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-28").endObject()) ).actionGet(); refresh(); @@ -690,13 +749,11 @@ public void testParseDateMath() throws Exception { ) ); client().index( - indexRequest("test").type("type1") - .id("1") + indexRequest("test").id("1") .source(jsonBuilder().startObject().field("test", "value").field("num1", 
System.currentTimeMillis()).endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") + indexRequest("test").id("2") .source( jsonBuilder().startObject() .field("test", "value") @@ -749,24 +806,18 @@ public void testValueMissingLin() throws Exception { ); client().index( - indexRequest("test").type("type1") - .id("1") + indexRequest("test").id("1") .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").field("num2", "1.0").endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") - .source(jsonBuilder().startObject().field("test", "value").field("num2", "1.0").endObject()) + indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num2", "1.0").endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("3") + indexRequest("test").id("3") .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-30").field("num2", "1.0").endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("4") - .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-30").endObject()) + indexRequest("test").id("4").source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-30").endObject()) ).actionGet(); refresh(); @@ -827,9 +878,7 @@ public void testDateWithoutOrigin() throws Exception { + "-" + String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth()); client().index( - indexRequest("test").type("type1") - .id("1") - .source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) + indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) ).actionGet(); docDate = dt.minusDays(2); docDateString = docDate.getYear() @@ -838,9 +887,7 @@ public void testDateWithoutOrigin() throws Exception { + "-" + String.format(Locale.ROOT, 
"%02d", docDate.getDayOfMonth()); client().index( - indexRequest("test").type("type1") - .id("2") - .source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) + indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) ).actionGet(); docDate = dt.minusDays(3); docDateString = docDate.getYear() @@ -849,9 +896,7 @@ public void testDateWithoutOrigin() throws Exception { + "-" + String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth()); client().index( - indexRequest("test").type("type1") - .id("3") - .source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) + indexRequest("test").id("3").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject()) ).actionGet(); refresh(); @@ -918,7 +963,6 @@ public void testManyDocsLin() throws Exception { indexBuilders.add( client().prepareIndex() - .setType("type") .setId(Integer.toString(i)) .setIndex("test") .setSource( @@ -987,16 +1031,15 @@ public void testParsingExceptionIfFieldDoesNotExist() throws Exception { ); int numDocs = 2; client().index( - indexRequest("test").type("type") - .source( - jsonBuilder().startObject() - .field("test", "value") - .startObject("geo") - .field("lat", 1) - .field("lon", 2) - .endObject() - .endObject() - ) + indexRequest("test").source( + jsonBuilder().startObject() + .field("test", "value") + .startObject("geo") + .field("lat", 1) + .field("lon", 2) + .endObject() + .endObject() + ) ).actionGet(); refresh(); List lonlat = new ArrayList<>(); @@ -1040,8 +1083,7 @@ public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { ) ); client().index( - indexRequest("test").type("type") - .source(jsonBuilder().startObject().field("test", "value").field("num", Integer.toString(1)).endObject()) + indexRequest("test").source(jsonBuilder().startObject().field("test", "value").field("num", 
Integer.toString(1)).endObject()) ).actionGet(); refresh(); // so, we indexed a string field, but now we try to score a num field @@ -1079,9 +1121,8 @@ public void testNoQueryGiven() throws Exception { .endObject() ) ); - client().index( - indexRequest("test").type("type").source(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()) - ).actionGet(); + client().index(indexRequest("test").source(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject())) + .actionGet(); refresh(); // so, we indexed a string field, but now we try to score a num field ActionFuture response = client().search( @@ -1119,7 +1160,6 @@ public void testMultiFieldOptions() throws Exception { // Index for testing MIN and MAX IndexRequestBuilder doc1 = client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( @@ -1138,7 +1178,6 @@ public void testMultiFieldOptions() throws Exception { .endObject() ); IndexRequestBuilder doc2 = client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource( @@ -1192,14 +1231,12 @@ public void testMultiFieldOptions() throws Exception { // Now test AVG and SUM doc1 = client().prepareIndex() - .setType("type1") .setId("1") .setIndex("test") .setSource( jsonBuilder().startObject().field("test", "value").startArray("num").value(0.0).value(1.0).value(2.0).endArray().endObject() ); doc2 = client().prepareIndex() - .setType("type1") .setId("2") .setIndex("test") .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()); @@ -1231,4 +1268,132 @@ public void testMultiFieldOptions() throws Exception { sh = sr.getHits(); assertThat((double) (sh.getAt(0).getScore()), closeTo((sh.getAt(1).getScore()), 1.e-6d)); } + + public void testDistanceScoreGeoLinGaussExplain() throws Exception { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + 
.startObject("test") + .field("type", "text") + .endObject() + .startObject("loc") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + List indexBuilders = new ArrayList<>(); + indexBuilders.add( + client().prepareIndex() + .setId("1") + .setIndex("test") + .setSource( + jsonBuilder().startObject() + .field("test", "value") + .startObject("loc") + .field("lat", 10) + .field("lon", 20) + .endObject() + .endObject() + ) + ); + indexBuilders.add( + client().prepareIndex() + .setId("2") + .setIndex("test") + .setSource( + jsonBuilder().startObject() + .field("test", "value") + .startObject("loc") + .field("lat", 11) + .field("lon", 22) + .endObject() + .endObject() + ) + ); + + indexRandom(true, indexBuilders); + + // Test Gauss + List lonlat = new ArrayList<>(); + lonlat.add(20f); + lonlat.add(11f); + + final String queryName = "query1"; + final String functionName = "func1"; + ActionFuture response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery.queryName(queryName), gaussDecayFunction("loc", lonlat, "1000km", functionName)) + ) + ) + ); + SearchResponse sr = response.actionGet(); + SearchHits sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, sr); + + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery(baseQuery.queryName(queryName), linearDecayFunction("loc", lonlat, "1000km", functionName)) + ) + ) + ); + + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, 
sr); + + response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery( + baseQuery.queryName(queryName), + exponentialDecayFunction("loc", lonlat, "1000km", functionName) + ) + ) + ) + ); + + sr = response.actionGet(); + sh = sr.getHits(); + assertThat(sh.getTotalHits().value, equalTo(2L)); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + assertExplain(queryName, functionName, sr); + } + + private void assertExplain(final String queryName, final String functionName, SearchResponse sr) { + SearchHit firstHit = sr.getHits().getAt(0); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* (_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription().toString(), containsString("_name: " + queryName)); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "random score function (seed: 12345678, field: _seq_no, _name: func1)" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDetails(), arrayWithSize(1)); + // "description": "exp(-0.5*pow(MIN of: [Math.max(arcDistance(10.999999972991645, 21.99999994598329(=doc value),11.0, 20.0(=origin)) + // - 0.0(=offset), 0)],2.0)/7.213475204444817E11, _name: func1)" + assertThat( + firstHit.getExplanation().getDetails()[1].getDetails()[0].getDetails()[0].getDescription().toString(), + containsString("_name: " + functionName) + ); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java index f577636d18d4b..f67b913a75871 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java @@ -38,6 +38,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; import org.opensearch.common.lucene.search.function.CombineFunction; +import org.opensearch.common.lucene.search.function.Functions; import org.opensearch.common.settings.Settings; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; @@ -72,6 +73,7 @@ import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.opensearch.search.builder.SearchSourceBuilder.searchSource; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -121,8 +123,17 @@ static class MyScript extends ScoreScript implements ExplainableScoreScript { @Override public Explanation explain(Explanation subQueryScore) throws IOException { + return explain(subQueryScore, null); + } + + @Override + public Explanation explain(Explanation subQueryScore, String functionName) throws IOException { Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore); - return Explanation.match((float) (execute(null)), "This script returned " + execute(null), scoreExp); + return Explanation.match( + (float) (execute(null)), + "This script" + Functions.nameOrEmptyFunc(functionName) + " returned " + execute(null), + scoreExp + ); } @Override @@ -140,7 +151,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut List indexRequests = new ArrayList<>(); for (int i = 0; i < 20; i++) { indexRequests.add( - client().prepareIndex("test", "type") + client().prepareIndex("test") .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("number_field", i).field("text", "text").endObject()) ); 
@@ -174,4 +185,36 @@ public void testExplainScript() throws InterruptedException, IOException, Execut idCounter--; } } + + public void testExplainScriptWithName() throws InterruptedException, IOException, ExecutionException { + List indexRequests = new ArrayList<>(); + indexRequests.add( + client().prepareIndex("test") + .setId(Integer.toString(1)) + .setSource(jsonBuilder().startObject().field("number_field", 1).field("text", "text").endObject()) + ); + indexRandom(true, true, indexRequests); + client().admin().indices().prepareRefresh().get(); + ensureYellow(); + SearchResponse response = client().search( + searchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source( + searchSource().explain(true) + .query( + functionScoreQuery( + termQuery("text", "text"), + scriptFunction(new Script(ScriptType.INLINE, "test", "explainable_script", Collections.emptyMap()), "func1") + ).boostMode(CombineFunction.REPLACE) + ) + ) + ).actionGet(); + + OpenSearchAssertions.assertNoFailures(response); + SearchHits hits = response.getHits(); + assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getHits()[0].getId(), equalTo("1")); + assertThat(hits.getHits()[0].getExplanation().getDetails(), arrayWithSize(2)); + assertThat(hits.getHits()[0].getExplanation().getDetails()[0].getDescription(), containsString("_name: func1")); + } + } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java index 573bd7f75c266..8e0a14b7062a7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -35,10 +35,13 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import 
org.opensearch.common.lucene.search.function.FieldValueFactorFunction; +import org.opensearch.search.SearchHit; import org.opensearch.test.OpenSearchIntegTestCase; import java.io.IOException; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.arrayWithSize; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.index.query.QueryBuilders.functionScoreQuery; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; @@ -71,9 +74,9 @@ public void testFieldValueFactor() throws IOException { ).get() ); - client().prepareIndex("test", "type1", "1").setSource("test", 5, "body", "foo").get(); - client().prepareIndex("test", "type1", "2").setSource("test", 17, "body", "foo").get(); - client().prepareIndex("test", "type1", "3").setSource("body", "bar").get(); + client().prepareIndex("test").setId("1").setSource("test", 5, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", 17, "body", "foo").get(); + client().prepareIndex("test").setId("3").setSource("body", "bar").get(); refresh(); @@ -143,7 +146,7 @@ public void testFieldValueFactor() throws IOException { .get(); assertEquals(response.getHits().getAt(0).getScore(), response.getHits().getAt(2).getScore(), 0); - client().prepareIndex("test", "type1", "2").setSource("test", -1, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", -1, "body", "foo").get(); refresh(); // -1 divided by 0 is infinity, which should provoke an exception. 
@@ -163,4 +166,47 @@ public void testFieldValueFactor() throws IOException { // locally, instead of just having failures } } + + public void testFieldValueFactorExplain() throws IOException { + assertAcked( + prepareCreate("test").addMapping( + "type1", + jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("test") + .field("type", randomFrom(new String[] { "short", "float", "long", "integer", "double" })) + .endObject() + .startObject("body") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + ).get() + ); + + client().prepareIndex("test").setId("1").setSource("test", 5, "body", "foo").get(); + client().prepareIndex("test").setId("2").setSource("test", 17, "body", "foo").get(); + client().prepareIndex("test").setId("3").setSource("body", "bar").get(); + + refresh(); + + // document 2 scores higher because 17 > 5 + final String functionName = "func1"; + final String queryName = "query"; + SearchResponse response = client().prepareSearch("test") + .setExplain(true) + .setQuery( + functionScoreQuery(simpleQueryStringQuery("foo").queryName(queryName), fieldValueFactorFunction("test", functionName)) + ) + .get(); + assertOrderedSearchHits(response, "2", "1"); + SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "sum of: (_name: query)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription(), containsString("_name: " + queryName)); + // "description": "field value function(_name: func1): none(doc['test'].value * factor=1.0)" + assertThat(firstHit.getExplanation().getDetails()[1].toString(), containsString("_name: " + functionName)); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java index 8bb844d5edc68..3d24933f66d17 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java @@ -43,6 +43,7 @@ import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; +import org.opensearch.search.SearchHit; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchTestCase; @@ -66,6 +67,8 @@ import static org.opensearch.search.builder.SearchSourceBuilder.searchSource; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -140,6 +143,35 @@ public void testScriptScoresWithAgg() throws IOException { assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); } + public void testScriptScoresWithAggWithExplain() throws IOException { + createIndex(INDEX); + index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); + refresh(); + + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "get score value", Collections.emptyMap()); + + SearchResponse response = client().search( + searchRequest().source( + searchSource().explain(true) + .query(functionScoreQuery(scriptFunction(script, "func1"), "query1")) + .aggregation(terms("score_agg").script(script)) + ) + ).actionGet(); + assertSearchResponse(response); + + final SearchHit firstHit = response.getHits().getAt(0); + assertThat(firstHit.getScore(), equalTo(1.0f)); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* 
(_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription(), containsString("_name: query1")); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "script score function(_name: func1), computed with script:\"Script{ ... }\"" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDescription(), containsString("_name: func1")); + + assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0")); + assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); + } + public void testMinScoreFunctionScoreBasic() throws IOException { float score = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, OpenSearchTestCase::randomFloat); float minScore = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, OpenSearchTestCase::randomFloat); @@ -189,7 +221,7 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept int scoreOffset = randomIntBetween(0, 2 * numDocs); int minScore = randomIntBetween(0, 2 * numDocs); for (int i = 0; i < numDocs; i++) { - docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); + docs.add(client().prepareIndex(INDEX).setId(Integer.toString(i)).setSource("num", i + scoreOffset)); } indexRandom(true, docs); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return (doc['num'].value)", Collections.emptyMap()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java index ca69c38d1fcda..885f1aa7ff7a0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java @@ -95,14 +95,10 @@ public void testPlugin() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().get(); client().index( - indexRequest("test").type("type1") - .id("1") - .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-26").endObject()) + indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-26").endObject()) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") - .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject()) + indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject()) ).actionGet(); client().admin().indices().prepareRefresh().get(); @@ -175,7 +171,7 @@ public double evaluate(double value, double scale) { } @Override - public Explanation explainFunction(String distanceString, double distanceVal, double scale) { + public Explanation explainFunction(String distanceString, double distanceVal, double scale, String functionName) { return Explanation.match((float) distanceVal, "" + distanceVal); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index 134c68538d15b..a21363e58949b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -92,7 +92,7 @@ public void testEnforceWindowSize() { // this int iters = scaledRandomIntBetween(10, 20); for (int i = 0; i < iters; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("f", Integer.toString(i)).get(); 
+ client().prepareIndex("test").setId(Integer.toString(i)).setSource("f", Integer.toString(i)).get(); } refresh(); @@ -142,9 +142,10 @@ public void testRescorePhrase() throws Exception { ).setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox").get(); + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get(); + client().prepareIndex("test") + .setId("3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree") .get(); refresh(); @@ -207,21 +208,21 @@ public void testMoreDocs() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "massachusetts avenue boston massachusetts").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "boston avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("1").setSource("field1", "massachusetts avenue boston massachusetts").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "boston avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "4").setSource("field1", "boston road lexington massachusetts").get(); - client().prepareIndex("test", "type1", "5").setSource("field1", "lexington street 
lexington massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); - client().prepareIndex("test", "type1", "7").setSource("field1", "bosten street san franciso california").get(); + client().prepareIndex("test").setId("4").setSource("field1", "boston road lexington massachusetts").get(); + client().prepareIndex("test").setId("5").setSource("field1", "lexington street lexington massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("7").setSource("field1", "bosten street san franciso california").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "8").setSource("field1", "hollywood boulevard los angeles california").get(); - client().prepareIndex("test", "type1", "9").setSource("field1", "1st street boston massachussetts").get(); - client().prepareIndex("test", "type1", "10").setSource("field1", "1st street boston massachusetts").get(); + client().prepareIndex("test").setId("8").setSource("field1", "hollywood boulevard los angeles california").get(); + client().prepareIndex("test").setId("9").setSource("field1", "1st street boston massachussetts").get(); + client().prepareIndex("test").setId("10").setSource("field1", "1st street boston massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "11").setSource("field1", "2st street boston massachusetts").get(); - client().prepareIndex("test", "type1", "12").setSource("field1", "3st street boston massachusetts").get(); + client().prepareIndex("test").setId("11").setSource("field1", "2st street boston massachusetts").get(); + client().prepareIndex("test").setId("12").setSource("field1", "3st street boston massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); 
SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) @@ -302,11 +303,11 @@ public void testSmallRescoreWindow() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").get(); + client().prepareIndex("test").setId("1").setSource("field1", "lexington massachusetts avenue").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() @@ -388,11 +389,11 @@ public void testRescorerMadeScoresWorse() throws Exception { .setSettings(builder.put("index.number_of_shards", 1)) ); - client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").get(); - client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); + client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); + client().prepareIndex("test").setId("6").setSource("field1", "massachusetts avenue lexington massachusetts").get(); client().admin().indices().prepareRefresh("test").get(); - client().prepareIndex("test", "type1", 
"1").setSource("field1", "lexington massachusetts avenue").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").get(); + client().prepareIndex("test").setId("1").setSource("field1", "lexington massachusetts avenue").get(); + client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() @@ -538,9 +539,10 @@ public void testExplain() throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox").get(); + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test") + .setId("3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree") .get(); refresh(); @@ -800,7 +802,7 @@ private int indexRandomNumbers(String analyzer, int shards, boolean dummyDocs) t int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i)); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i)); } indexRandom(true, dummyDocs, docs); @@ -815,7 +817,7 @@ public void testFromSize() throws Exception { settings.put(SETTING_NUMBER_OF_REPLICAS, 0); assertAcked(prepareCreate("test").setSettings(settings)); for (int i = 0; i < 5; i++) { - client().prepareIndex("test", 
"type", "" + i).setSource("text", "hello world").get(); + client().prepareIndex("test").setId("" + i).setSource("text", "hello world").get(); } refresh(); @@ -831,7 +833,7 @@ public void testFromSize() throws Exception { public void testRescorePhaseWithInvalidSort() throws Exception { assertAcked(prepareCreate("test")); for (int i = 0; i < 5; i++) { - client().prepareIndex("test", "type", "" + i).setSource("number", 0).get(); + client().prepareIndex("test").setId("" + i).setSource("number", 0).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java index ffdbdcdabec75..670f5e65eb575 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java @@ -63,6 +63,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -178,7 +179,8 @@ public void testScoreAccessWithinScript() throws Exception { int docCount = randomIntBetween(100, 200); for (int i = 0; i < docCount; i++) { - client().prepareIndex("test", "type", "" + i) + client().prepareIndex("test") + .setId("" + i) // we add 1 to the index field to make sure that the scripts below never compute log(0) .setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) .get(); @@ -288,6 +290,37 @@ public void testSeedReportedInExplain() throws Exception { assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } + 
public void testSeedAndNameReportedInExplain() throws Exception { + createIndex("test"); + ensureGreen(); + index("test", "type", "1", jsonBuilder().startObject().endObject()); + flush(); + refresh(); + + int seed = 12345678; + + final String queryName = "query1"; + final String functionName = "func1"; + SearchResponse resp = client().prepareSearch("test") + .setQuery( + functionScoreQuery( + matchAllQuery().queryName(queryName), + randomFunction(functionName).seed(seed).setField(SeqNoFieldMapper.NAME) + ) + ) + .setExplain(true) + .get(); + assertNoFailures(resp); + assertEquals(1, resp.getHits().getTotalHits().value); + SearchHit firstHit = resp.getHits().getAt(0); + assertThat(firstHit.getExplanation().getDetails(), arrayWithSize(2)); + // "description": "*:* (_name: query1)" + assertThat(firstHit.getExplanation().getDetails()[0].getDescription().toString(), containsString("_name: " + queryName)); + assertThat(firstHit.getExplanation().getDetails()[1].getDetails(), arrayWithSize(2)); + // "description": "random score function (seed: 12345678, field: _seq_no, _name: func1)" + assertThat(firstHit.getExplanation().getDetails()[1].getDetails()[0].getDescription().toString(), containsString("seed: " + seed)); + } + public void testNoDocs() throws Exception { createIndex("test"); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java index ece21899fbea5..f865e56cd99e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java @@ -72,7 +72,8 @@ public void testSimpleBoundingBoxTest() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + 
client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "New York") @@ -85,7 +86,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 5.286 km - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Times Square") @@ -98,7 +100,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 0.4621 km - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "Tribeca") @@ -111,7 +114,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 1.055 km - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("name", "Wall Street") @@ -124,7 +128,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 1.258 km - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("name", "Soho") @@ -137,7 +142,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 2.029 km - client().prepareIndex("test", "type1", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("name", "Greenwich Village") @@ -150,7 +156,8 @@ public void testSimpleBoundingBoxTest() throws Exception { .get(); // to NY: 8.572 km - client().prepareIndex("test", "type1", "7") + client().prepareIndex("test") + .setId("7") .setSource( jsonBuilder().startObject() .field("name", "Brooklyn") @@ -196,7 +203,8 @@ public void testLimit2BoundingBox() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( 
jsonBuilder().startObject() .field("userid", 880) @@ -210,7 +218,8 @@ public void testLimit2BoundingBox() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("userid", 534) @@ -274,7 +283,8 @@ public void testCompleteLonRange() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("userid", 880) @@ -288,7 +298,8 @@ public void testCompleteLonRange() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("userid", 534) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java index a03f3a5f62343..d00c0a8c0faf7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java @@ -134,7 +134,8 @@ public void setupTestIndex() throws IOException { } public void testDistanceScript() throws Exception { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "TestPosition") @@ -202,7 +203,8 @@ public void testDistanceScript() throws Exception { } public void testGeoDistanceAggregation() throws IOException { - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "TestPosition") @@ -220,7 +222,6 @@ public void testGeoDistanceAggregation() throws IOException { String name = 
"TestPosition"; search.setQuery(QueryBuilders.matchAllQuery()) - .setTypes("type1") .addAggregation( AggregationBuilders.geoDistance(name, new GeoPoint(tgt_lat, tgt_lon)) .field("location") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index 0e412a51ee77f..8322c9704eecb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -214,7 +214,6 @@ public void testShapeRelations() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("polygon") .startObject("properties") .startObject("area") .field("type", "geo_shape") @@ -222,13 +221,9 @@ public void testShapeRelations() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("shapes") - .addMapping("polygon", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").setMapping(mapping); mappingRequest.get(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); @@ -249,7 +244,7 @@ public void testShapeRelations() throws Exception { ); BytesReference data = BytesReference.bytes(jsonBuilder().startObject().field("area", polygon).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // Point in polygon @@ -312,7 +307,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", inverse).endObject()); - client().prepareIndex("shapes", 
"polygon", "2").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("2").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // re-check point on polygon hole @@ -351,7 +346,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); // Create a polygon crossing longitude 180 with hole. @@ -364,7 +359,7 @@ public void testShapeRelations() throws Exception { ); data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).get(); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); result = client().prepareSearch() @@ -413,7 +408,7 @@ public void testBulk() throws Exception { .endObject(); client().admin().indices().prepareCreate("countries").setSettings(settings).addMapping("country", xContentBuilder).get(); - BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, null, xContentBuilder.contentType()).get(); + BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, xContentBuilder.contentType()).get(); for (BulkItemResponse item : bulk.getItems()) { assertFalse("unable to index data", item.isFailed()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java index 42edaddee99a2..c2d75b6aa55af 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java @@ -73,7 +73,8 @@ protected void setupSuiteScopeCluster() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "New York") @@ -84,7 +85,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 5.286 km - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "Times Square") @@ -95,7 +97,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 0.4621 km - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "Tribeca") @@ -106,7 +109,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 1.055 km - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .field("name", "Wall Street") @@ -117,7 +121,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 1.258 km - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .field("name", "Soho") @@ -128,7 +133,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 2.029 km - client().prepareIndex("test", "type1", "6") + client().prepareIndex("test") + .setId("6") .setSource( jsonBuilder().startObject() .field("name", "Greenwich Village") @@ -139,7 +145,8 @@ protected void setupSuiteScopeCluster() throws Exception { .endObject() ), // to NY: 8.572 km - client().prepareIndex("test", "type1", "7") + client().prepareIndex("test") + .setId("7") .setSource( jsonBuilder().startObject() .field("name", "Brooklyn") diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index c37fe30c8311a..7315155e39520 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -76,7 +76,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -84,16 +83,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -101,10 +98,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); internalCluster().fullRestart(); @@ -185,7 +181,7 @@ public void testIgnoreMalformed() throws Exception { .endObject() ); - indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -206,7 +202,7 @@ 
public void testMappingUpdate() throws Exception { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().admin().indices().preparePutMapping("test").setType("geometry").setSource(update, XContentType.JSON).get() + () -> client().admin().indices().preparePutMapping("test").setSource(update, XContentType.JSON).get() ); assertThat(e.getMessage(), containsString("using [BKD] strategy cannot be merged with")); } @@ -227,7 +223,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" @@ -237,10 +233,10 @@ public void testIndexShapeRouting() throws Exception { + " }\n" + "}"; - indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC")) + .setQuery(geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -265,16 +261,16 @@ public void testIndexPolygonDateLine() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("vector").addMapping("doc", mappingVector, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("vector").setMapping(mappingVector).get()); ensureGreen(); - assertAcked(client().admin().indices().prepareCreate("quad").addMapping("doc", mappingQuad, XContentType.JSON).get()); + 
assertAcked(client().admin().indices().prepareCreate("quad").setMapping(mappingQuad).get()); ensureGreen(); String source = "{\n" + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\"" + "}"; - indexRandom(true, client().prepareIndex("quad", "doc", "0").setSource(source, XContentType.JSON)); - indexRandom(true, client().prepareIndex("vector", "doc", "0").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("quad").setId("0").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("vector").setId("0").setSource(source, XContentType.JSON)); try { ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 7f9f3c818f27f..28b00acd21479 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -68,7 +68,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -77,16 +76,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -95,10 +92,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() 
.endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); internalCluster().fullRestart(); @@ -183,7 +179,7 @@ public void testIgnoreMalformed() throws Exception { .endObject() ); - indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -205,7 +201,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" @@ -215,10 +211,10 @@ public void testIndexShapeRouting() throws Exception { + " }\n" + "}"; - indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC")) + .setQuery(geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -238,7 +234,7 @@ public void testLegacyCircle() throws Exception { ); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "_doc", "0").setSource("shape", (ToXContent) (builder, params) -> { + indexRandom(true, 
client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { builder.startObject() .field("type", "circle") .startArray("coordinates") @@ -267,7 +263,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept ); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "_doc").setId("0").setSource("shape", (ToXContent) (builder, params) -> { + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { builder.startObject() .field("type", "circle") .startArray("coordinates") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index 3dac4402a1ffc..7ffd648d06611 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -42,7 +42,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; import org.opensearch.index.query.QueryBuilder; @@ -104,11 +104,9 @@ public void testSimpleMoreLikeThis() throws Exception { assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Indexing..."); - client().index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("text", "lucene").endObject())) + client().index(indexRequest("test").id("1").source(jsonBuilder().startObject().field("text", "lucene").endObject())).actionGet(); + 
client().index(indexRequest("test").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject())) .actionGet(); - client().index( - indexRequest("test").type("type1").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject()) - ).actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); logger.info("Running moreLikeThis"); @@ -139,16 +137,14 @@ public void testSimpleMoreLikeThisWithTypes() throws Exception { assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Indexing..."); - client().index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("text", "lucene").endObject())) + client().index(indexRequest("test").id("1").source(jsonBuilder().startObject().field("text", "lucene").endObject())).actionGet(); + client().index(indexRequest("test").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject())) .actionGet(); - client().index( - indexRequest("test").type("type1").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject()) - ).actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); logger.info("Running moreLikeThis"); SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "type1", "1") }).minTermFreq(1).minDocFreq(1)) + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)) .get(); assertHitCount(response, 1L); } @@ -176,14 +172,10 @@ public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Excep ensureGreen(); client().index( - indexRequest("test").type("type") - .id("1") - .source(jsonBuilder().startObject().field("myField", "and_foo").field("empty", "").endObject()) + indexRequest("test").id("1").source(jsonBuilder().startObject().field("myField", "and_foo").field("empty", "").endObject()) ).actionGet(); 
client().index( - indexRequest("test").type("type") - .id("2") - .source(jsonBuilder().startObject().field("myField", "and_foo").field("empty", "").endObject()) + indexRequest("test").id("2").source(jsonBuilder().startObject().field("myField", "and_foo").field("empty", "").endObject()) ).actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); @@ -205,13 +197,10 @@ public void testSimpleMoreLikeOnLongField() throws Exception { assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Indexing..."); - client().index( - indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("some_long", 1367484649580L).endObject()) - ).actionGet(); - client().index(indexRequest("test").type("type1").id("2").source(jsonBuilder().startObject().field("some_long", 0).endObject())) - .actionGet(); - client().index(indexRequest("test").type("type1").id("3").source(jsonBuilder().startObject().field("some_long", -666).endObject())) + client().index(indexRequest("test").id("1").source(jsonBuilder().startObject().field("some_long", 1367484649580L).endObject())) .actionGet(); + client().index(indexRequest("test").id("2").source(jsonBuilder().startObject().field("some_long", 0).endObject())).actionGet(); + client().index(indexRequest("test").id("3").source(jsonBuilder().startObject().field("some_long", -666).endObject())).actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); @@ -250,18 +239,14 @@ public void testMoreLikeThisWithAliases() throws Exception { assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Indexing..."); - client().index( - indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("text", "lucene beta").endObject()) - ).actionGet(); - client().index( - indexRequest("test").type("type1").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject()) - ).actionGet(); - client().index( - 
indexRequest("test").type("type1").id("3").source(jsonBuilder().startObject().field("text", "opensearch beta").endObject()) - ).actionGet(); - client().index( - indexRequest("test").type("type1").id("4").source(jsonBuilder().startObject().field("text", "opensearch release").endObject()) - ).actionGet(); + client().index(indexRequest("test").id("1").source(jsonBuilder().startObject().field("text", "lucene beta").endObject())) + .actionGet(); + client().index(indexRequest("test").id("2").source(jsonBuilder().startObject().field("text", "lucene release").endObject())) + .actionGet(); + client().index(indexRequest("test").id("3").source(jsonBuilder().startObject().field("text", "opensearch beta").endObject())) + .actionGet(); + client().index(indexRequest("test").id("4").source(jsonBuilder().startObject().field("text", "opensearch release").endObject())) + .actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); logger.info("Running moreLikeThis on index"); @@ -297,71 +282,63 @@ public void testMoreLikeThisWithAliases() throws Exception { public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String indexName = "foo"; String aliasName = "foo_name"; - String typeName = "bar"; - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate(indexName).addMapping(typeName, mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate(indexName).setMapping(mapping).get(); client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); - client().index( - indexRequest(indexName).type(typeName).id("1").source(jsonBuilder().startObject().field("text", "opensearch 
index").endObject()) - ).actionGet(); - client().index( - indexRequest(indexName).type(typeName).id("2").source(jsonBuilder().startObject().field("text", "lucene index").endObject()) - ).actionGet(); - client().index( - indexRequest(indexName).type(typeName).id("3").source(jsonBuilder().startObject().field("text", "opensearch index").endObject()) - ).actionGet(); + client().index(indexRequest(indexName).id("1").source(jsonBuilder().startObject().field("text", "opensearch index").endObject())) + .actionGet(); + client().index(indexRequest(indexName).id("2").source(jsonBuilder().startObject().field("text", "lucene index").endObject())) + .actionGet(); + client().index(indexRequest(indexName).id("3").source(jsonBuilder().startObject().field("text", "opensearch index").endObject())) + .actionGet(); refresh(indexName); SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, typeName, "1") }).minTermFreq(1).minDocFreq(1)) + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1)) .get(); assertHitCount(response, 2L); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); } public void testMoreLikeThisIssue2197() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); - client().prepareIndex("foo", "bar", "1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .get(); 
client().admin().indices().prepareRefresh("foo").get(); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1") })) + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") })) .get(); assertNoFailures(response); assertThat(response, notNullValue()); - response = client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1") })).get(); + response = client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") })).get(); assertNoFailures(response); assertThat(response, notNullValue()); } // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); ensureGreen(); - client().prepareIndex("foo", "bar", "1") + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .setRouting("2") .get(); client().admin().indices().prepareRefresh("foo").get(); SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1").routing("2") })) + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("2") })) .get(); assertNoFailures(response); assertThat(response, notNullValue()); @@ -369,25 +346,22 @@ public void testMoreLikeWithCustomRouting() throws 
Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); assertAcked( - prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).addMapping( - "bar", - mapping, - XContentType.JSON + prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping( + mapping ) ); ensureGreen(); - client().prepareIndex("foo", "bar", "1") + client().prepareIndex("foo") + .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) .setRouting("4000") .get(); client().admin().indices().prepareRefresh("foo").get(); SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1").routing("4000") })) + .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("4000") })) .get(); assertNoFailures(response); assertThat(response, notNullValue()); @@ -412,10 +386,12 @@ public void testNumericField() throws Exception { .endObject() ).get(); ensureGreen(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("string_value", "lucene index").field("int_value", 1).endObject()) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("string_value", "opensearch index").field("int_value", 42).endObject()) .get(); @@ -530,7 +506,7 @@ public void testMoreLikeThisWithFieldAlias() throws Exception { index("test", "_doc", "2", 
"text", "lucene release"); refresh(); - Item item = new Item("test", "_doc", "1"); + Item item = new Item("test", "1"); QueryBuilder query = QueryBuilders.moreLikeThisQuery(new String[] { "alias" }, null, new Item[] { item }) .minTermFreq(1) .minDocFreq(1); @@ -560,8 +536,7 @@ public void testSimpleMoreLikeInclude() throws Exception { logger.info("Indexing..."); client().index( - indexRequest("test").type("type1") - .id("1") + indexRequest("test").id("1") .source( jsonBuilder().startObject() .field("text", "Apache Lucene is a free/open source information retrieval software library") @@ -569,8 +544,7 @@ public void testSimpleMoreLikeInclude() throws Exception { ) ).actionGet(); client().index( - indexRequest("test").type("type1") - .id("2") + indexRequest("test").id("2") .source(jsonBuilder().startObject().field("text", "Lucene has been ported to other programming languages").endObject()) ).actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); @@ -588,7 +562,7 @@ public void testSimpleMoreLikeInclude() throws Exception { response = client().prepareSearch() .setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "type1", "2") }).minTermFreq(1) + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "2") }).minTermFreq(1) .minDocFreq(1) .include(true) .minimumShouldMatch("0%") @@ -629,18 +603,18 @@ public void testSimpleMoreLikeThisIds() throws Exception { logger.info("Indexing..."); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type1").setSource("text", "lucene").setId("1")); - builders.add(client().prepareIndex("test", "type1").setSource("text", "lucene release").setId("2")); - builders.add(client().prepareIndex("test", "type1").setSource("text", "apache lucene").setId("3")); + builders.add(client().prepareIndex("test").setSource("text", "lucene").setId("1")); + builders.add(client().prepareIndex("test").setSource("text", "lucene release").setId("2")); + 
builders.add(client().prepareIndex("test").setSource("text", "apache lucene").setId("3")); indexRandom(true, builders); logger.info("Running MoreLikeThis"); - Item[] items = new Item[] { new Item(null, null, "1") }; + Item[] items = new Item[] { new Item(null, "1") }; MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery(new String[] { "text" }, null, items) .include(true) .minTermFreq(1) .minDocFreq(1); - SearchResponse mltResponse = client().prepareSearch().setTypes("type1").setQuery(queryBuilder).get(); + SearchResponse mltResponse = client().prepareSearch().setQuery(queryBuilder).get(); assertHitCount(mltResponse, 3L); } @@ -656,10 +630,10 @@ public void testMoreLikeThisMultiValueFields() throws Exception { String[] values = { "aaaa", "bbbb", "cccc", "dddd", "eeee", "ffff", "gggg", "hhhh", "iiii", "jjjj" }; List builders = new ArrayList<>(values.length + 1); // index one document with all the values - builders.add(client().prepareIndex("test", "type1", "0").setSource("text", values)); + builders.add(client().prepareIndex("test").setId("0").setSource("text", values)); // index each document with only one of the values for (int i = 0; i < values.length; i++) { - builders.add(client().prepareIndex("test", "type1", String.valueOf(i + 1)).setSource("text", values[i])); + builders.add(client().prepareIndex("test").setId(String.valueOf(i + 1)).setSource("text", values[i])); } indexRandom(true, builders); @@ -667,12 +641,12 @@ public void testMoreLikeThisMultiValueFields() throws Exception { for (int i = 0; i < maxIters; i++) { int max_query_terms = randomIntBetween(1, values.length); logger.info("Running More Like This with max_query_terms = {}", max_query_terms); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] { "text" }, null, new Item[] { new Item(null, null, "0") }) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] { "text" }, null, new Item[] { new Item(null, "0") }) .minTermFreq(1) .minDocFreq(1) 
.maxQueryTerms(max_query_terms) .minimumShouldMatch("0%"); - SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, max_query_terms); } @@ -693,7 +667,7 @@ public void testMinimumShouldMatch() throws ExecutionException, InterruptedExcep for (int j = 1; j <= 10 - i; j++) { text += j + " "; } - builders.add(client().prepareIndex("test", "type1", i + "").setSource("text", text)); + builders.add(client().prepareIndex("test").setId(i + "").setSource("text", text)); } indexRandom(true, builders); @@ -705,7 +679,7 @@ public void testMinimumShouldMatch() throws ExecutionException, InterruptedExcep .minDocFreq(1) .minimumShouldMatch(minimumShouldMatch); logger.info("Testing with minimum_should_match = {}", minimumShouldMatch); - SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); if (minimumShouldMatch.equals("0%")) { assertHitCount(response, 10); @@ -727,28 +701,37 @@ public void testMoreLikeThisArtificialDocs() throws Exception { doc.field("field" + i, generateRandomStringArray(5, 10, false) + "a"); // make sure they are not all empty } doc.endObject(); - indexRandom(true, client().prepareIndex("test", "type1", "0").setSource(doc)); + indexRandom(true, client().prepareIndex("test").setId("0").setSource(doc)); logger.info("Checking the document matches ..."); // routing to ensure we hit the shard with the doc - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", doc).routing("0") }).minTermFreq(0) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", doc).routing("0") }).minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) .minimumShouldMatch("100%"); // 
strict all terms must match! - SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 1); } public void testMoreLikeThisMalformedArtificialDocs() throws Exception { logger.info("Creating the index ..."); - assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text,analyzer=whitespace", "date", "type=date")); + assertAcked( + prepareCreate("test").addMapping( + MapperService.SINGLE_MAPPING_NAME, + "text", + "type=text,analyzer=whitespace", + "date", + "type=date" + ) + ); ensureGreen("test"); logger.info("Creating an index with a single document ..."); indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "Hello World!").field("date", "2009-01-01").endObject()) ); @@ -757,19 +740,17 @@ public void testMoreLikeThisMalformedArtificialDocs() throws Exception { .field("text", "Hello World!") .field("date", "this is not a date!") .endObject(); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", malformedFieldDoc) }).minTermFreq(0) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", malformedFieldDoc) }).minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("0%"); - SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 0); logger.info("Checking with an empty document ..."); XContentBuilder emptyDoc = jsonBuilder().startObject().endObject(); - mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", emptyDoc) }).minTermFreq(0) - .minDocFreq(0) - .minimumShouldMatch("0%"); - response = 
client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", emptyDoc) }).minTermFreq(0).minDocFreq(0).minimumShouldMatch("0%"); + response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 0); @@ -778,10 +759,10 @@ public void testMoreLikeThisMalformedArtificialDocs() throws Exception { .field("text", "Hello World!") .field("date", "1000-01-01") // should be properly parsed but ignored ... .endObject(); - mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", normalDoc) }).minTermFreq(0) + mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", normalDoc) }).minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("100%"); // strict all terms must match but date is ignored - response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 1); } @@ -801,31 +782,31 @@ public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedExcep logger.info("Indexing each field value of this document as a single document."); List builders = new ArrayList<>(); for (int i = 0; i < numFields; i++) { - builders.add(client().prepareIndex("test", "type1", i + "").setSource("field" + i, i + "")); + builders.add(client().prepareIndex("test").setId(i + "").setSource("field" + i, i + "")); } indexRandom(true, builders); logger.info("First check the document matches all indexed docs."); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", doc) }).minTermFreq(0) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", doc) }).minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) .minimumShouldMatch("0%"); - SearchResponse response = 
client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, numFields); logger.info("Now check like this doc, but ignore one doc in the index, then two and so on..."); List docs = new ArrayList<>(numFields); for (int i = 0; i < numFields; i++) { - docs.add(new Item("test", "type1", i + "")); - mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", doc) }).unlike(docs.toArray(new Item[docs.size()])) + docs.add(new Item("test", i + "")); + mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", doc) }).unlike(docs.toArray(new Item[docs.size()])) .minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) .include(true) .minimumShouldMatch("0%"); - response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, numFields - (i + 1)); } @@ -839,9 +820,11 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("text", "hello world").field("text1", "opensearch").endObject()), - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("text", "goodby moon").field("text1", "opensearch").endObject()) ); @@ -849,7 +832,7 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt .minDocFreq(0) .include(true) .minimumShouldMatch("1%"); - SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 
2); @@ -857,18 +840,18 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt .minDocFreq(0) .include(true) .minimumShouldMatch("1%"); - response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get(); + response = client().prepareSearch("test").setQuery(mltQuery).get(); assertSearchResponse(response); assertHitCount(response, 1); } public void testWithRouting() throws IOException { - client().prepareIndex("index", "type", "1").setRouting("3").setSource("text", "this is a document").get(); - client().prepareIndex("index", "type", "2").setRouting("1").setSource("text", "this is another document").get(); - client().prepareIndex("index", "type", "3").setRouting("4").setSource("text", "this is yet another document").get(); + client().prepareIndex("index").setId("1").setRouting("3").setSource("text", "this is a document").get(); + client().prepareIndex("index").setId("2").setRouting("1").setSource("text", "this is another document").get(); + client().prepareIndex("index").setId("3").setRouting("4").setSource("text", "this is yet another document").get(); refresh("index"); - Item item = new Item("index", "type", "2").routing("1"); + Item item = new Item("index", "2").routing("1"); MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = new MoreLikeThisQueryBuilder( new String[] { "text" }, null, @@ -915,7 +898,7 @@ public void testWithMissingRouting() throws IOException { Throwable cause = exception.getCause(); assertThat(cause, instanceOf(RoutingMissingException.class)); - assertThat(cause.getMessage(), equalTo("routing is required for [test]/[_doc]/[1]")); + assertThat(cause.getMessage(), equalTo("routing is required for [test]/[1]")); } { @@ -926,10 +909,7 @@ public void testWithMissingRouting() throws IOException { .setQuery( new MoreLikeThisQueryBuilder( null, - new Item[] { - new Item("test", "1").routing("1"), - new Item("test", "type1", "2"), - new Item("test", "type1", "3") } + new Item[] { new Item("test", 
"1").routing("1"), new Item("test", "2"), new Item("test", "3") } ).minTermFreq(1).minDocFreq(1) ) .get() @@ -937,7 +917,7 @@ public void testWithMissingRouting() throws IOException { Throwable cause = exception.getCause(); assertThat(cause, instanceOf(RoutingMissingException.class)); - assertThat(cause.getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); + assertThat(cause.getMessage(), equalTo("routing is required for [test]/[2]")); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java index 8f374793ea8bf..8226663abf49e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java @@ -49,8 +49,8 @@ public class MultiSearchIT extends OpenSearchIntegTestCase { public void testSimpleMultiSearch() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", "xxx").get(); - client().prepareIndex("test", "type", "2").setSource("field", "yyy").get(); + client().prepareIndex("test").setId("1").setSource("field", "xxx").get(); + client().prepareIndex("test").setId("2").setSource("field", "yyy").get(); refresh(); MultiSearchResponse response = client().prepareMultiSearch() .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) @@ -73,7 +73,7 @@ public void testSimpleMultiSearchMoreRequests() { createIndex("test"); int numDocs = randomIntBetween(0, 16); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("{}", XContentType.JSON).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index bac944cc51a78..c6c58e6fcb6a5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -82,7 +82,8 @@ public void testSimpleNested() throws Exception { searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -101,7 +102,7 @@ public void testSimpleNested() throws Exception { .get(); waitForRelocation(ClusterHealthStatus.GREEN); - GetResponse getResponse = client().prepareGet("test", "type1", "1").get(); + GetResponse getResponse = client().prepareGet("test", "1").get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getSourceAsBytes(), notNullValue()); refresh(); @@ -133,7 +134,8 @@ public void testSimpleNested() throws Exception { // add another doc, one that would match if it was not nested... - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -196,7 +198,7 @@ public void testSimpleNested() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); // check delete, so all is gone... 
- DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "2").get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); refresh(); @@ -231,7 +233,8 @@ public void testMultiNested() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field", "value") @@ -263,7 +266,7 @@ public void testMultiNested() throws Exception { ) .get(); - GetResponse getResponse = client().prepareGet("test", "type1", "1").get(); + GetResponse getResponse = client().prepareGet("test", "1").get(); assertThat(getResponse.isExists(), equalTo(true)); waitForRelocation(ClusterHealthStatus.GREEN); refresh(); @@ -389,7 +392,8 @@ public void testDeleteNestedDocsWithAlias() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -407,7 +411,8 @@ public void testDeleteNestedDocsWithAlias() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", "value2") @@ -448,7 +453,8 @@ public void testExplain() throws Exception { ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", "value1") @@ -500,7 +506,8 @@ public void testSimpleNestedSorting() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -515,7 +522,8 @@ public void testSimpleNestedSorting() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + 
.setId("2") .setSource( jsonBuilder().startObject() .field("field1", 2) @@ -530,7 +538,8 @@ public void testSimpleNestedSorting() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("field1", 3) @@ -548,7 +557,6 @@ public void testSimpleNestedSorting() throws Exception { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setTypes("type1") .setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedPath("nested1")) .get(); @@ -562,7 +570,6 @@ public void testSimpleNestedSorting() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4")); searchResponse = client().prepareSearch("test") - .setTypes("type1") .setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedPath("nested1")) .get(); @@ -602,7 +609,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("field1", 1) @@ -619,7 +627,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("field1", 2) @@ -638,7 +647,8 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .get(); // Doc with missing nested docs if nested filter is used refresh(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .field("field1", 3) @@ -658,7 +668,6 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws 
Exception { refresh(); SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test") - .setTypes("type1") .setQuery(QueryBuilders.matchAllQuery()) .addSort( SortBuilders.fieldSort("nested1.field1") @@ -683,7 +692,6 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10")); searchRequestBuilder = client().prepareSearch("test") - .setTypes("type1") .setQuery(QueryBuilders.matchAllQuery()) .addSort( SortBuilders.fieldSort("nested1.field1") @@ -711,25 +719,22 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { public void testNestedSortWithMultiLevelFiltering() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"acl\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"access_id\": {\"type\": \"keyword\"},\n" - + " \"operation\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"name\": {\"type\": \"keyword\"},\n" - + " \"user\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"username\": {\"type\": \"keyword\"},\n" - + " \"id\": {\"type\": \"integer\"}\n" - + " }\n" + + " \"properties\": {\n" + + " \"acl\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"access_id\": {\"type\": \"keyword\"},\n" + + " \"operation\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\"type\": \"keyword\"},\n" + + " \"user\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"username\": {\"type\": \"keyword\"},\n" + + " \"id\": {\"type\": \"integer\"}\n" + " }\n" + " }\n" + " }\n" @@ -737,13 +742,13 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { + " }\n" + " }\n" + " }\n" - + "}", - XContentType.JSON + + "}" ) ); ensureGreen(); - 
client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( "{\n" + " \"acl\": [\n" @@ -797,7 +802,8 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( "{\n" + " \"acl\": [\n" @@ -955,8 +961,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { public void testLeakingSortValues() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put("number_of_shards", 1)) - .addMapping( - "test-type", + .setMapping( "{\n" + " \"dynamic\": \"strict\",\n" + " \"properties\": {\n" @@ -977,13 +982,13 @@ public void testLeakingSortValues() throws Exception { + " }\n" + " }\n" + " }\n" - + " }\n", - XContentType.JSON + + " }\n" ) ); ensureGreen(); - client().prepareIndex("test", "test-type", "1") + client().prepareIndex("test") + .setId("1") .setSource( "{\n" + " \"nested1\": [\n" @@ -1001,7 +1006,8 @@ public void testLeakingSortValues() throws Exception { ) .get(); - client().prepareIndex("test", "test-type", "2") + client().prepareIndex("test") + .setId("2") .setSource( "{\n" + " \"nested1\": [\n" @@ -1075,7 +1081,8 @@ public void testSortNestedWithNestedFilter() throws Exception { ensureGreen(); // sum: 11 - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("grand_parent_values", 1L) @@ -1117,7 +1124,8 @@ public void testSortNestedWithNestedFilter() throws Exception { .get(); // sum: 7 - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("grand_parent_values", 2L) @@ -1159,7 +1167,8 @@ public void testSortNestedWithNestedFilter() throws Exception { .get(); // sum: 2 - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( 
jsonBuilder().startObject() .field("grand_parent_values", 3L) @@ -1473,7 +1482,8 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { ) ); - IndexResponse indexResponse1 = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse1 = client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("officelocation", "gendale") @@ -1526,7 +1536,8 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { .get(); assertTrue(indexResponse1.getShardInfo().getSuccessful() > 0); - IndexResponse indexResponse2 = client().prepareIndex("test", "type", "2") + IndexResponse indexResponse2 = client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("officelocation", "gendale") @@ -1607,8 +1618,8 @@ public void testCheckFixedBitSetCache() throws Exception { } assertAcked(prepareCreate("test").setSettings(settingsBuilder).addMapping("type")); - client().prepareIndex("test", "type", "0").setSource("field", "value").get(); - client().prepareIndex("test", "type", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("0").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); ensureSearchable("test"); @@ -1617,7 +1628,7 @@ public void testCheckFixedBitSetCache() throws Exception { assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L)); // Now add nested mapping - assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource("array1", "type=nested")); + assertAcked(client().admin().indices().preparePutMapping("test").setSource("array1", "type=nested")); XContentBuilder builder = jsonBuilder().startObject() .startArray("array1") @@ -1627,11 +1638,11 @@ public void testCheckFixedBitSetCache() throws Exception { .endArray() .endObject(); // index simple data - client().prepareIndex("test", "type", 
"2").setSource(builder).get(); - client().prepareIndex("test", "type", "3").setSource(builder).get(); - client().prepareIndex("test", "type", "4").setSource(builder).get(); - client().prepareIndex("test", "type", "5").setSource(builder).get(); - client().prepareIndex("test", "type", "6").setSource(builder).get(); + client().prepareIndex("test").setId("2").setSource(builder).get(); + client().prepareIndex("test").setId("3").setSource(builder).get(); + client().prepareIndex("test").setId("4").setSource(builder).get(); + client().prepareIndex("test").setId("5").setSource(builder).get(); + client().prepareIndex("test").setId("6").setSource(builder).get(); refresh(); ensureSearchable("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java index fc80cb848f306..c69555d00170b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java @@ -83,7 +83,7 @@ public void testStopOneNodePreferenceWithRedState() throws IOException { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i).setSource("field1", "value1").get(); + client().prepareIndex("test").setId("" + i).setSource("field1", "value1").get(); } refresh(); internalCluster().stopRandomDataNode(); @@ -121,7 +121,7 @@ public void testNoPreferenceRandom() { ); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final Client client = internalCluster().smartClient(); @@ -137,7 +137,7 @@ public void testSimplePreference() { client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 1}", XContentType.JSON).get(); ensureGreen(); - 
client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).get(); @@ -170,7 +170,7 @@ public void testNodesOnlyRandom() { ) ); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final Client client = internalCluster().smartClient(); @@ -240,7 +240,7 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { ) ); ensureGreen(); - client().prepareIndex("test", "_doc").setSource("field1", "value1").get(); + client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); final String customPreference = randomAlphaOfLength(10); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java index 2090ddc0a083b..24885c1f853ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java @@ -35,7 +35,7 @@ public void testProfilerNetworkTime() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } List stringFields = Arrays.asList("field1"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 1c3e5e03a2f25..7d63db78e205a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -120,7 +120,7 @@ protected void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field(STRING_FIELD, randomFrom(randomStrings)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 96e7fd0b843fa..65d3ee2779de8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -67,7 +67,7 @@ public void testProfileQuery() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } List stringFields = Arrays.asList("field1"); @@ -121,7 +121,8 @@ public void testProfileMatchesRegular() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)) + docs[i] = client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource("id", String.valueOf(i), 
"field1", English.intToEnglish(i), "field2", i); } @@ -199,7 +200,7 @@ public void testSimpleMatch() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -239,7 +240,7 @@ public void testBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -299,7 +300,7 @@ public void testEmptyBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -342,7 +343,7 @@ public void testCollapsingBool() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } 
indexRandom(true, docs); @@ -382,7 +383,7 @@ public void testBoosting() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -422,7 +423,7 @@ public void testDisMaxRange() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -462,7 +463,7 @@ public void testRange() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); @@ -501,7 +502,8 @@ public void testPhrase() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)) + docs[i] = client().prepareIndex("test") + .setId(String.valueOf(i)) .setSource("field1", English.intToEnglish(i) + " " + English.intToEnglish(i + 1), "field2", i); } @@ -516,7 +518,6 @@ public void testPhrase() throws Exception { 
SearchResponse resp = client().prepareSearch() .setQuery(q) .setIndices("test") - .setTypes("type1") .setProfile(true) .setSearchType(SearchType.QUERY_THEN_FETCH) .get(); @@ -557,7 +558,7 @@ public void testNoProfile() throws Exception { int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); + docs[i] = client().prepareIndex("test").setId(String.valueOf(i)).setSource("field1", English.intToEnglish(i), "field2", i); } indexRandom(true, docs); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index 113bdd5027c5d..89c614485b620 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -113,7 +113,7 @@ public void testExists() throws Exception { emptyMap() }; List reqs = new ArrayList<>(); for (Map source : sources) { - reqs.add(client().prepareIndex("idx", "type").setSource(source)); + reqs.add(client().prepareIndex("idx").setSource(source)); } // We do NOT index dummy documents, otherwise the type for these dummy documents // would have _field_names indexed while the current type might not which might @@ -156,9 +156,8 @@ public void testExists() throws Exception { } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { final String index = searchHit.getIndex(); - final String type = searchHit.getType(); final String id = searchHit.getId(); - final ExplainResponse explanation = client().prepareExplain(index, type, id) + final ExplainResponse explanation = client().prepareExplain(index, id) .setQuery(QueryBuilders.existsQuery(fieldName)) .get(); logger.info( @@ -201,11 +200,11 @@ public void 
testFieldAlias() throws Exception { ensureGreen("idx"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource("bar", 3)); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 2.718))); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 6.283))); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource("bar", 3)); + indexRequests.add(client().prepareIndex("idx").setSource("foo", singletonMap("bar", 2.718))); + indexRequests.add(client().prepareIndex("idx").setSource("foo", singletonMap("bar", 6.283))); indexRandom(true, false, indexRequests); Map expected = new LinkedHashMap<>(); @@ -244,10 +243,10 @@ public void testFieldAliasWithNoDocValues() throws Exception { ensureGreen("idx"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 3)); - indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 43)); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx").setSource("foo", 3)); + indexRequests.add(client().prepareIndex("idx").setSource("foo", 43)); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")).get(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index 0bf0c05617efd..b40a034fc2c92 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -114,7 +114,8 @@ public void init() throws Exception { int numDocs = scaledRandomIntBetween(50, 100); List builders = new ArrayList<>(); builders.add( - client().prepareIndex("test", "test", "theone") + client().prepareIndex("test") + .setId("theone") .setSource( "id", "theone", @@ -133,7 +134,8 @@ public void init() throws Exception { ) ); builders.add( - client().prepareIndex("test", "test", "theother") + client().prepareIndex("test") + .setId("theother") .setSource( "id", "theother", @@ -151,7 +153,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "ultimate1") + client().prepareIndex("test") + .setId("ultimate1") .setSource( "id", "ultimate1", @@ -168,7 +171,8 @@ public void init() throws Exception { ) ); builders.add( - client().prepareIndex("test", "test", "ultimate2") + client().prepareIndex("test") + .setId("ultimate2") .setSource( "full_name", "Man the Ultimate Ninja", @@ -184,7 +188,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "anotherhero") + client().prepareIndex("test") + .setId("anotherhero") .setSource( "id", "anotherhero", @@ -202,7 +207,8 @@ public void init() throws Exception { ); builders.add( - client().prepareIndex("test", "test", "nowHero") + client().prepareIndex("test") + .setId("nowHero") .setSource( "id", "nowHero", @@ -229,7 +235,8 @@ public void init() throws Exception { String first = RandomPicks.randomFrom(random(), firstNames); String last = randomPickExcept(lastNames, first); builders.add( - client().prepareIndex("test", "test", 
"" + i) + client().prepareIndex("test") + .setId("" + i) .setSource( "id", i, @@ -1013,8 +1020,8 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution assertAcked(builder.addMapping("type", "title", "type=text", "body", "type=text")); ensureGreen(); List builders = new ArrayList<>(); - builders.add(client().prepareIndex(idx, "type", "1").setSource("title", "foo", "body", "bar")); - builders.add(client().prepareIndex(idx, "type", "2").setSource("title", "bar", "body", "foo")); + builders.add(client().prepareIndex(idx).setId("1").setSource("title", "foo", "body", "bar")); + builders.add(client().prepareIndex(idx).setId("2").setSource("title", "bar", "body", "foo")); indexRandom(true, false, builders); SearchResponse searchResponse = client().prepareSearch(idx) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 248754451e1bb..5b2d87a6508fe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -90,9 +90,9 @@ protected Settings nodeSettings(int nodeOrdinal) { public void testBasicAllQuery() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar baz")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f2", "Bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f2", "Bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -110,8 +110,8 @@ 
public void testBasicAllQuery() throws Exception { public void testWithDate() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); @@ -134,10 +134,10 @@ public void testWithDate() throws Exception { public void testWithLotsOfTypes() throws Exception { List reqs = new ArrayList<>(); reqs.add( - client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") + client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") ); reqs.add( - client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") + client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") ); indexRandom(true, false, reqs); @@ -161,7 +161,7 @@ public void testWithLotsOfTypes() throws Exception { public void testDocWithAllTypes() throws Exception { List reqs = new ArrayList<>(); String docBody = copyToStringFromClasspath("/org/opensearch/search/query/all-example-document.json"); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource(docBody, XContentType.JSON)); + reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -198,9 +198,9 @@ public void 
testDocWithAllTypes() throws Exception { public void testKeywordWithWhitespace() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f2", "Foo Bar")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f1", "foo bar")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f2", "Foo Bar")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); @@ -224,7 +224,7 @@ public void testAllFields() throws Exception { ensureGreen("test_1"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test_1", "_doc", "1").setSource("f1", "foo", "f2", "eggplant")); + reqs.add(client().prepareIndex("test_1").setId("1").setSource("f1", "foo", "f2", "eggplant")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test_1") @@ -239,8 +239,8 @@ public void testAllFields() throws Exception { public void testPhraseQueryOnFieldWithNoPositions() throws Exception { List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar", "f4", "eggplant parmesan")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "foo bar", "f4", "chicken parmesan")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar", "f4", "eggplant parmesan")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "foo bar", "f4", "chicken parmesan")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("\"eggplant parmesan\"").lenient(true)).get(); @@ -289,7 +289,7 @@ public void testLimitOnExpandedFieldsButIgnoreUnmappedFields() throws 
Exception assertAcked(prepareCreate("ignoreunmappedfields").addMapping("_doc", builder)); - client().prepareIndex("ignoreunmappedfields", "_doc", "1").setSource("field1", "foo bar baz").get(); + client().prepareIndex("ignoreunmappedfields").setId("1").setSource("field1", "foo bar baz").get(); refresh(); QueryStringQueryBuilder qb = queryStringQuery("bar"); @@ -324,7 +324,7 @@ public void testLimitOnExpandedFields() throws Exception { ).addMapping("_doc", builder) ); - client().prepareIndex("testindex", "_doc", "1").setSource("field_A0", "foo bar baz").get(); + client().prepareIndex("testindex").setId("1").setSource("field_A0", "foo bar baz").get(); refresh(); // single field shouldn't trigger the limit @@ -375,9 +375,9 @@ private void doAssertLimitExceededException(String queryString, int exceedingFie public void testFieldAlias() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")).get(); @@ -389,9 +389,9 @@ public void testFieldAlias() throws Exception { public void testFieldAliasWithEmbeddedFieldNames() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - 
indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")).get(); @@ -403,9 +403,9 @@ public void testFieldAliasWithEmbeddedFieldNames() throws Exception { public void testFieldAliasWithWildcardField() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")).get(); @@ -417,7 +417,7 @@ public void testFieldAliasWithWildcardField() throws Exception { public void testFieldAliasOnDisallowedFieldType() throws Exception { List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + 
indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); indexRandom(true, false, indexRequests); // The wildcard field matches aliases for both a text and geo_point field. diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java index c36b0f0fedbc9..59f9e020df0d9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java @@ -91,7 +91,7 @@ public void testScriptScore() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc", "" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); } refresh(); @@ -117,7 +117,7 @@ public void testScriptScoreBoolQuery() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc", "" + i).setSource("field1", "text" + i, "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + i, "field2", i).get(); } refresh(); @@ -138,9 +138,9 @@ public void testRewrittenQuery() { prepareCreate("test-index2").setSettings(Settings.builder().put("index.number_of_shards", 1)) .addMapping("_doc", "field1", "type=date", "field2", "type=double") ); - client().prepareIndex("test-index2", "_doc", "1").setSource("field1", "2019-09-01", "field2", 1).get(); - client().prepareIndex("test-index2", "_doc", "2").setSource("field1", "2019-10-01", "field2", 2).get(); - 
client().prepareIndex("test-index2", "_doc", "3").setSource("field1", "2019-11-01", "field2", 3).get(); + client().prepareIndex("test-index2").setId("1").setSource("field1", "2019-09-01", "field2", 1).get(); + client().prepareIndex("test-index2").setId("2").setSource("field1", "2019-10-01", "field2", 2).get(); + client().prepareIndex("test-index2").setId("3").setSource("field1", "2019-11-01", "field2", 3).get(); refresh(); RangeQueryBuilder rangeQB = new RangeQueryBuilder("field1").from("2019-01-01"); // the query should be rewritten to from:null @@ -157,7 +157,7 @@ public void testDisallowExpensiveQueries() { assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index 38ab776aa94d0..c9bb746973226 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -45,7 +45,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; import org.opensearch.bootstrap.JavaVersion; -import org.opensearch.common.Strings; import org.opensearch.common.document.DocumentField; import org.opensearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.opensearch.common.regex.Regex; @@ -124,7 +123,6 @@ import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.index.query.QueryBuilders.termsLookupQuery; import static 
org.opensearch.index.query.QueryBuilders.termsQuery; -import static org.opensearch.index.query.QueryBuilders.typeQuery; import static org.opensearch.index.query.QueryBuilders.wildcardQuery; import static org.opensearch.index.query.QueryBuilders.wrapperQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @@ -165,9 +163,9 @@ public void testEmptyQueryString() throws ExecutionException, InterruptedExcepti createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumps"), - client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"), - client().prepareIndex("test", "type1", "3").setSource("field1", "quick") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox jumps"), + client().prepareIndex("test").setId("2").setSource("field1", "quick brown"), + client().prepareIndex("test").setId("3").setSource("field1", "quick") ); assertHitCount(client().prepareSearch().setQuery(queryStringQuery("quick")).get(), 3L); @@ -177,9 +175,9 @@ public void testEmptyQueryString() throws ExecutionException, InterruptedExcepti // see https://github.com/elastic/elasticsearch/issues/3177 public void testIssue3177() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "value3").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("3").setSource("field1", "value3").get(); ensureGreen(); waitForRelocation(); forceMerge(); @@ -216,9 +214,8 @@ public void testIndexOptions() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text,index_options=docs")); indexRandom( 
true, - client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2") - .setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") + client().prepareIndex("test").setId("1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchPhraseQuery("field2", "quick brown").slop(0)).get(); @@ -237,9 +234,8 @@ public void testConstantScoreQuery() throws Exception { createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), - client().prepareIndex("test", "type1", "2") - .setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") + client().prepareIndex("test").setId("1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get(); @@ -281,7 +277,7 @@ public void testConstantScoreQuery() throws Exception { int num = scaledRandomIntBetween(100, 200); IndexRequestBuilder[] builders = new IndexRequestBuilder[num]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test_1", "type", "" + i).setSource("f", English.intToEnglish(i)); + builders[i] = client().prepareIndex("test_1").setId("" + i).setSource("f", English.intToEnglish(i)); } createIndex("test_1"); indexRandom(true, builders); @@ -318,8 +314,8 @@ public void testAllDocsQueryString() throws InterruptedException, ExecutionExcep createIndex("test"); indexRandom( true, - 
client().prepareIndex("test", "type1", "1").setSource("foo", "bar"), - client().prepareIndex("test", "type1", "2").setSource("foo", "bar") + client().prepareIndex("test").setId("1").setSource("foo", "bar"), + client().prepareIndex("test").setId("2").setSource("foo", "bar") ); int iters = scaledRandomIntBetween(100, 200); @@ -346,10 +342,11 @@ public void testCommonTermsQuery() throws Exception { .get(); indexRandom( true, - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"), - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") + client().prepareIndex("test").setId("1").setSource("field1", "the quick brown fox"), + client().prepareIndex("test").setId("2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") ); SearchResponse searchResponse = client().prepareSearch() @@ -443,7 +440,7 @@ public void testCommonTermsQuery() throws Exception { public void testQueryStringAnalyzedWildcard() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("value*")).get(); @@ -465,7 +462,7 @@ public void testQueryStringAnalyzedWildcard() throws Exception { public void testLowercaseExpandedTerms() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); SearchResponse searchResponse = 
client().prepareSearch().setQuery(queryStringQuery("VALUE_3~1")).get(); @@ -487,7 +484,7 @@ public void testDateRangeInQueryString() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); @@ -513,7 +510,7 @@ public void testDateRangeInQueryStringWithTimeZone_7880() { ZoneId timeZone = randomZone(); String now = DateFormatter.forPattern("strict_date_optional_time").format(Instant.now().atZone(timeZone)); logger.info(" --> Using time_zone [{}], now is [{}]", timeZone.getId(), now); - client().prepareIndex("test", "type", "1").setSource("past", now).get(); + client().prepareIndex("test").setId("1").setSource("past", now).get(); refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -528,8 +525,8 @@ public void testDateRangeInQueryStringWithTimeZone_10477() { // as with dynamic mappings some shards might be lacking behind and parse a different query assertAcked(prepareCreate("test").addMapping("type", "past", "type=date")); - client().prepareIndex("test", "type", "1").setSource("past", "2015-04-05T23:00:00+0000").get(); - client().prepareIndex("test", "type", "2").setSource("past", "2015-04-06T00:00:00+0000").get(); + client().prepareIndex("test").setId("1").setSource("past", "2015-04-05T23:00:00+0000").get(); + client().prepareIndex("test").setId("2").setSource("past", "2015-04-06T00:00:00+0000").get(); refresh(); // Timezone set with dates @@ -557,56 +554,29 @@ public void testDateRangeInQueryStringWithTimeZone_10477() { assertHitCount(searchResponse, 0L); 
} - public void testTypeFilter() throws Exception { - assertAcked(prepareCreate("test")); - indexRandom( - true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value1") - ); - - assertHitCount(client().prepareSearch().setQuery(typeQuery("type1")).get(), 2L); - assertHitCount(client().prepareSearch().setQuery(typeQuery("type2")).get(), 0L); - - assertHitCount(client().prepareSearch().setTypes("type1").setQuery(matchAllQuery()).get(), 2L); - assertHitCount(client().prepareSearch().setTypes("type2").setQuery(matchAllQuery()).get(), 0L); - - assertHitCount(client().prepareSearch().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 2L); - } - public void testIdsQueryTestsIdIndexed() throws Exception { assertAcked(client().admin().indices().prepareCreate("test")); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3") + client().prepareIndex("test").setId("1").setSource("field1", "value1"), + client().prepareIndex("test").setId("2").setSource("field1", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3") ); - SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").addIds("1", "3"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - // no type - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "3")).get(); - 
assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - // no type searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("7", "10")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("7", "10")).get(); assertHitCount(searchResponse, 0L); // repeat..., with terms - searchResponse = client().prepareSearch().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get(); + searchResponse = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); } @@ -616,7 +586,7 @@ public void testTermIndexQuery() throws Exception { for (String indexName : indexNames) { assertAcked(client().admin().indices().prepareCreate(indexName)); - indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1")); + indexRandom(true, client().prepareIndex(indexName).setId(indexName + "1").setSource("field1", "value1")); } @@ -650,7 +620,8 @@ public void testFilterExistsMissing() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -661,7 +632,8 @@ public void testFilterExistsMissing() throws Exception { .field("field2", "value2_1") .endObject() ), - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .startObject("obj1") @@ -671,7 +643,8 @@ public void testFilterExistsMissing() throws Exception { .field("field1", "value1_2") .endObject() ), - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( 
jsonBuilder().startObject() .startObject("obj2") @@ -681,7 +654,8 @@ public void testFilterExistsMissing() throws Exception { .field("field2", "value2_3") .endObject() ), - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .startObject("obj2") @@ -727,7 +701,7 @@ public void testFilterExistsMissing() throws Exception { public void testPassQueryOrFilterAsJSONString() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").setRefreshPolicy(IMMEDIATE).get(); WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }"); assertHitCount(client().prepareSearch().setQuery(wrapper).get(), 1L); @@ -742,7 +716,7 @@ public void testPassQueryOrFilterAsJSONString() throws Exception { public void testFiltersWithCustomCacheKey() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get(); assertHitCount(searchResponse, 1L); @@ -762,9 +736,9 @@ public void testMatchQueryNumeric() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("long", 1L, "double", 1.0d), - client().prepareIndex("test", "type1", "2").setSource("long", 2L, "double", 2.0d), - client().prepareIndex("test", "type1", "3").setSource("long", 3L, "double", 3.0d) + client().prepareIndex("test").setId("1").setSource("long", 1L, "double", 1.0d), + client().prepareIndex("test").setId("2").setSource("long", 2L, "double", 2.0d), + 
client().prepareIndex("test").setId("3").setSource("long", 3L, "double", 3.0d) ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("long", "1")).get(); @@ -782,8 +756,8 @@ public void testMatchQueryFuzzy() throws Exception { indexRandom( true, - client().prepareIndex("test", "_doc", "1").setSource("text", "Unit"), - client().prepareIndex("test", "_doc", "2").setSource("text", "Unity") + client().prepareIndex("test").setId("1").setSource("text", "Unit"), + client().prepareIndex("test").setId("2").setSource("text", "Unity") ); SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("0")).get(); @@ -810,9 +784,9 @@ public void testMultiMatchQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value4", "field3", "value3"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2", "field2", "value5", "field3", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3", "field2", "value6", "field3", "value1") + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value4", "field3", "value3"), + client().prepareIndex("test").setId("2").setSource("field1", "value2", "field2", "value5", "field3", "value2"), + client().prepareIndex("test").setId("3").setSource("field1", "value3", "field2", "value6", "field3", "value1") ); MultiMatchQueryBuilder builder = multiMatchQuery("value1 value2 value4", "field1", "field2"); @@ -854,7 +828,7 @@ public void testMultiMatchQuery() throws Exception { assertSearchHits(searchResponse, "3", "1"); // Test lenient - client().prepareIndex("test", "type1", "3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); + client().prepareIndex("test").setId("3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); refresh(); builder = multiMatchQuery("value1", "field1", "field2", "field4"); @@ 
-875,8 +849,8 @@ public void testMatchQueryZeroTermsQuery() { assertAcked( prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)) @@ -898,8 +872,8 @@ public void testMultiMatchQueryZeroTermsQuery() { assertAcked( prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic") ); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value3", "field2", "value4").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value3", "field2", "value4").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must( @@ -922,8 +896,8 @@ public void testMultiMatchQueryZeroTermsQuery() { public void testMultiMatchQueryMinShouldMatch() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); + client().prepareIndex("test").setId("2").setSource("field2", "value1").get(); refresh(); MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); @@ -968,8 +942,8 @@ 
public void testMultiMatchQueryMinShouldMatch() { public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); + client().prepareIndex("test").setId("1").setSource("field1", new String[] { "value1", "value2", "value3" }).get(); + client().prepareIndex("test").setId("2").setSource("field2", "value1").get(); refresh(); BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1")) @@ -1000,8 +974,8 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws public void testFuzzyQueryString() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); + client().prepareIndex("test").setId("1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); + client().prepareIndex("test").setId("2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("str:foobaz~1")).get(); @@ -1018,8 +992,8 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException { indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("important", "phrase match", "less_important", "nothing important"), - client().prepareIndex("test", "type1", "2").setSource("important", "nothing important", "less_important", "phrase match") + client().prepareIndex("test").setId("1").setSource("important", "phrase match", "less_important", "nothing important"), + client().prepareIndex("test").setId("2").setSource("important", "nothing important", "less_important", "phrase match") ); 
SearchResponse searchResponse = client().prepareSearch() @@ -1036,8 +1010,8 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException { public void testSpecialRangeSyntaxInQueryString() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); + client().prepareIndex("test").setId("1").setSource("str", "foobar", "date", "2012-02-01", "num", 12).get(); + client().prepareIndex("test").setId("2").setSource("str", "fred", "date", "2012-02-05", "num", 20).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>19")).get(); @@ -1069,10 +1043,10 @@ public void testEmptytermsQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4") + client().prepareIndex("test").setId("1").setSource("term", "1"), + client().prepareIndex("test").setId("2").setSource("term", "2"), + client().prepareIndex("test").setId("3").setSource("term", "3"), + client().prepareIndex("test").setId("4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1089,10 +1063,10 @@ public void testTermsQuery() throws Exception { indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("str", "1", "lng", 1L, "dbl", 1.0d), - client().prepareIndex("test", "type", "2").setSource("str", "2", "lng", 2L, "dbl", 2.0d), - client().prepareIndex("test", "type", "3").setSource("str", "3", "lng", 3L, "dbl", 3.0d), - client().prepareIndex("test", "type", "4").setSource("str", "4", "lng", 4L, "dbl", 4.0d) + 
client().prepareIndex("test").setId("1").setSource("str", "1", "lng", 1L, "dbl", 1.0d), + client().prepareIndex("test").setId("2").setSource("str", "2", "lng", 2L, "dbl", 2.0d), + client().prepareIndex("test").setId("3").setSource("str", "3", "lng", 3L, "dbl", 3.0d), + client().prepareIndex("test").setId("4").setSource("str", "4", "lng", 4L, "dbl", 4.0d) ); SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "1", "4"))).get(); @@ -1167,11 +1141,12 @@ public void testTermsLookupFilter() throws Exception { indexRandom( true, - client().prepareIndex("lookup", "type", "1").setSource("terms", new String[] { "1", "3" }), - client().prepareIndex("lookup", "type", "2").setSource("terms", new String[] { "2" }), - client().prepareIndex("lookup", "type", "3").setSource("terms", new String[] { "2", "4" }), - client().prepareIndex("lookup", "type", "4").setSource("other", "value"), - client().prepareIndex("lookup2", "type", "1") + client().prepareIndex("lookup").setId("1").setSource("terms", new String[] { "1", "3" }), + client().prepareIndex("lookup").setId("2").setSource("terms", new String[] { "2" }), + client().prepareIndex("lookup").setId("3").setSource("terms", new String[] { "2", "4" }), + client().prepareIndex("lookup").setId("4").setSource("other", "value"), + client().prepareIndex("lookup2") + .setId("1") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1185,7 +1160,8 @@ public void testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup2", "type", "2") + client().prepareIndex("lookup2") + .setId("2") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1196,7 +1172,8 @@ public void testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup2", "type", "3") + client().prepareIndex("lookup2") + .setId("3") .setSource( XContentFactory.jsonBuilder() .startObject() @@ -1210,95 +1187,83 @@ public void 
testTermsLookupFilter() throws Exception { .endArray() .endObject() ), - client().prepareIndex("lookup3", "type", "1").setSource("terms", new String[] { "1", "3" }), - client().prepareIndex("test", "type", "1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4") + client().prepareIndex("lookup3").setId("1").setSource("terms", new String[] { "1", "3" }), + client().prepareIndex("test").setId("1").setSource("term", "1"), + client().prepareIndex("test").setId("2").setSource("term", "2"), + client().prepareIndex("test").setId("3").setSource("term", "3"), + client().prepareIndex("test").setId("4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // same as above, just on the _id... - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // another search with same parameters... 
- searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))).get(); assertHitCount(searchResponse, 0L); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))) .get(); assertHitCount(searchResponse, 1L); 
assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "missing", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup3" type "type" has the source disabled: ignore the lookup terms - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))).get(); assertHitCount(searchResponse, 0L); } public void testBasicQueryById() throws Exception { assertAcked(prepareCreate("test")); - client().prepareIndex("test", "_doc", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "_doc", "2").setSource("field1", "value2").get(); - client().prepareIndex("test", "_doc", "3").setSource("field1", "value3").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); + client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); + client().prepareIndex("test").setId("3").setSource("field1", "value3").get(); refresh(); - SearchResponse 
searchResponse = client().prepareSearch().setQuery(idsQuery("_doc").addIds("1", "2")).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); @@ -1310,11 +1275,11 @@ public void testBasicQueryById() throws Exception { assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - searchResponse = client().prepareSearch().setQuery(idsQuery(Strings.EMPTY_ARRAY).addIds("1")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "_doc").addIds("1", "2", "3", "4")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get(); assertHitCount(searchResponse, 3L); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); } @@ -1338,15 +1303,18 @@ public void testNumericTermsAndRanges() throws Exception { ) ); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource("num_byte", 1, "num_short", 1, "num_integer", 1, "num_long", 1, "num_float", 1, "num_double", 1) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource("num_byte", 2, "num_short", 2, "num_integer", 2, "num_long", 2, "num_float", 2, "num_double", 2) .get(); - client().prepareIndex("test", "type1", "17") + client().prepareIndex("test") + .setId("17") .setSource("num_byte", 17, "num_short", 17, "num_integer", 17, "num_long", 17, "num_float", 17, "num_double", 17) .get(); refresh(); @@ -1452,10 +1420,10 @@ public void testNumericRangeFilter_2826() throws Exception { ) ); - client().prepareIndex("test", "type1", "1").setSource("field1", "test1", 
"num_long", 1).get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "test1", "num_long", 2).get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "test2", "num_long", 3).get(); - client().prepareIndex("test", "type1", "4").setSource("field1", "test2", "num_long", 4).get(); + client().prepareIndex("test").setId("1").setSource("field1", "test1", "num_long", 1).get(); + client().prepareIndex("test").setId("2").setSource("field1", "test1", "num_long", 2).get(); + client().prepareIndex("test").setId("3").setSource("field1", "test2", "num_long", 3).get(); + client().prepareIndex("test").setId("4").setSource("field1", "test2", "num_long", 4).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1490,10 +1458,10 @@ public void testMustNot() throws IOException, ExecutionException, InterruptedExc indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"), - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything"), - client().prepareIndex("test", "test", "3").setSource("description", "foo other"), - client().prepareIndex("test", "test", "4").setSource("description", "foo") + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar"), + client().prepareIndex("test").setId("2").setSource("description", "foo other anything"), + client().prepareIndex("test").setId("3").setSource("description", "foo other"), + client().prepareIndex("test").setId("4").setSource("description", "foo") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1514,7 +1482,7 @@ public void testIntervals() throws InterruptedException { indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "it's cold outside, there's no kind of atmosphere") + client().prepareIndex("test").setId("1").setSource("description", "it's cold outside, there's no kind of atmosphere") ); String 
json = "{ \"intervals\" : " @@ -1538,10 +1506,10 @@ public void testSimpleSpan() throws IOException, ExecutionException, Interrupted indexRandom( true, - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"), - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything"), - client().prepareIndex("test", "test", "3").setSource("description", "foo other"), - client().prepareIndex("test", "test", "4").setSource("description", "foo") + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar"), + client().prepareIndex("test").setId("2").setSource("description", "foo other anything"), + client().prepareIndex("test").setId("3").setSource("description", "foo other"), + client().prepareIndex("test").setId("4").setSource("description", "foo") ); SearchResponse searchResponse = client().prepareSearch("test").setQuery(spanOrQuery(spanTermQuery("description", "bar"))).get(); @@ -1556,10 +1524,10 @@ public void testSimpleSpan() throws IOException, ExecutionException, Interrupted public void testSpanMultiTermQuery() throws IOException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar", "count", 1).get(); - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything", "count", 2).get(); - client().prepareIndex("test", "test", "3").setSource("description", "foo other", "count", 3).get(); - client().prepareIndex("test", "test", "4").setSource("description", "fop", "count", 4).get(); + client().prepareIndex("test").setId("1").setSource("description", "foo other anything bar", "count", 1).get(); + client().prepareIndex("test").setId("2").setSource("description", "foo other anything", "count", 2).get(); + client().prepareIndex("test").setId("3").setSource("description", "foo other", "count", 3).get(); + client().prepareIndex("test").setId("4").setSource("description", "fop", "count", 
4).get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -1589,8 +1557,8 @@ public void testSpanMultiTermQuery() throws IOException { public void testSpanNot() throws IOException, ExecutionException, InterruptedException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("description", "the quick brown fox jumped over the lazy dog").get(); - client().prepareIndex("test", "test", "2").setSource("description", "the quick black fox leaped over the sleeping dog").get(); + client().prepareIndex("test").setId("1").setSource("description", "the quick brown fox jumped over the lazy dog").get(); + client().prepareIndex("test").setId("2").setSource("description", "the quick black fox leaped over the sleeping dog").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1657,19 +1625,23 @@ public void testSimpleDFSQuery() throws IOException { ) ); - client().prepareIndex("test", "_doc", "1") + client().prepareIndex("test") + .setId("1") .setRouting("Y") .setSource("online", false, "bs", "Y", "ts", System.currentTimeMillis() - 100, "type", "s") .get(); - client().prepareIndex("test", "_doc", "2") + client().prepareIndex("test") + .setId("2") .setRouting("X") .setSource("online", true, "bs", "X", "ts", System.currentTimeMillis() - 10000000, "type", "s") .get(); - client().prepareIndex("test", "_doc", "3") + client().prepareIndex("test") + .setId("3") .setRouting(randomAlphaOfLength(2)) .setSource("online", false, "ts", System.currentTimeMillis() - 100, "type", "bs") .get(); - client().prepareIndex("test", "_doc", "4") + client().prepareIndex("test") + .setId("4") .setRouting(randomAlphaOfLength(2)) .setSource("online", true, "ts", System.currentTimeMillis() - 123123, "type", "bs") .get(); @@ -1697,7 +1669,7 @@ public void testSimpleDFSQuery() throws IOException { } public void testMultiFieldQueryString() { - client().prepareIndex("test", "s", "1").setSource("field1", "value1", "field2", 
"value2").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); logger.info("regular"); assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("value1").field("field1").field("field2")).get(), 1); @@ -1720,7 +1692,7 @@ public void testMultiFieldQueryString() { public void testMultiMatchLenientIssue3797() { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", 123, "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", 123, "field2", "value2").get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1740,10 +1712,10 @@ public void testMultiMatchLenientIssue3797() { public void testMinScore() throws ExecutionException, InterruptedException { createIndex("test"); - client().prepareIndex("test", "test", "1").setSource("score", 1.5).get(); - client().prepareIndex("test", "test", "2").setSource("score", 1.0).get(); - client().prepareIndex("test", "test", "3").setSource("score", 2.0).get(); - client().prepareIndex("test", "test", "4").setSource("score", 0.5).get(); + client().prepareIndex("test").setId("1").setSource("score", 1.5).get(); + client().prepareIndex("test").setId("2").setSource("score", 1.0).get(); + client().prepareIndex("test").setId("3").setSource("score", 2.0).get(); + client().prepareIndex("test").setId("4").setSource("score", 0.5).get(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1757,8 +1729,8 @@ public void testMinScore() throws ExecutionException, InterruptedException { public void testQueryStringWithSlopAndFields() { assertAcked(prepareCreate("test")); - client().prepareIndex("test", "_doc", "1").setSource("desc", "one two three", "type", "customer").get(); - client().prepareIndex("test", "_doc", "2").setSource("desc", "one two three", "type", "product").get(); + 
client().prepareIndex("test").setId("1").setSource("desc", "one two three", "type", "customer").get(); + client().prepareIndex("test").setId("2").setSource("desc", "one two three", "type", "product").get(); refresh(); { SearchResponse searchResponse = client().prepareSearch("test") @@ -1798,17 +1770,15 @@ public void testQueryStringWithSlopAndFields() { public void testDateProvidedAsNumber() throws InterruptedException { createIndex("test"); - assertAcked( - client().admin().indices().preparePutMapping("test").setType("type").setSource("field", "type=date,format=epoch_millis").get() - ); + assertAcked(client().admin().indices().preparePutMapping("test").setSource("field", "type=date,format=epoch_millis").get()); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("field", 1000000000001L), - client().prepareIndex("test", "type", "2").setSource("field", 1000000000000L), - client().prepareIndex("test", "type", "3").setSource("field", 999999999999L), - client().prepareIndex("test", "type", "4").setSource("field", 1000000000002L), - client().prepareIndex("test", "type", "5").setSource("field", 1000000000003L), - client().prepareIndex("test", "type", "6").setSource("field", 999999999999L) + client().prepareIndex("test").setId("1").setSource("field", 1000000000001L), + client().prepareIndex("test").setId("2").setSource("field", 1000000000000L), + client().prepareIndex("test").setId("3").setSource("field", 999999999999L), + client().prepareIndex("test").setId("4").setSource("field", 1000000000002L), + client().prepareIndex("test").setId("5").setSource("field", 1000000000003L), + client().prepareIndex("test").setId("6").setSource("field", 999999999999L) ); assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(1000000000000L)).get(), 4); @@ -1820,11 +1790,12 @@ public void testRangeQueryWithTimeZone() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("date", "2014-01-01", 
"num", 1), - client().prepareIndex("test", "type1", "2").setSource("date", "2013-12-31T23:00:00", "num", 2), - client().prepareIndex("test", "type1", "3").setSource("date", "2014-01-01T01:00:00", "num", 3), + client().prepareIndex("test").setId("1").setSource("date", "2014-01-01", "num", 1), + client().prepareIndex("test").setId("2").setSource("date", "2013-12-31T23:00:00", "num", 2), + client().prepareIndex("test").setId("3").setSource("date", "2014-01-01T01:00:00", "num", 3), // Now in UTC+1 - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4) ); @@ -1921,8 +1892,8 @@ public void testRangeQueryWithLocaleMapping() throws Exception { indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("date_field", "Mi, 06 Dez 2000 02:55:00 -0800"), - client().prepareIndex("test", "type1", "2").setSource("date_field", "Do, 07 Dez 2000 02:55:00 -0800") + client().prepareIndex("test").setId("1").setSource("date_field", "Mi, 06 Dez 2000 02:55:00 -0800"), + client().prepareIndex("test").setId("2").setSource("date_field", "Do, 07 Dez 2000 02:55:00 -0800") ); SearchResponse searchResponse = client().prepareSearch("test") @@ -1938,7 +1909,7 @@ public void testRangeQueryWithLocaleMapping() throws Exception { public void testSearchEmptyDoc() { assertAcked(prepareCreate("test").setSettings("{\"index.analysis.analyzer.default.type\":\"keyword\"}", XContentType.JSON)); - client().prepareIndex("test", "type1", "1").setSource("{}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); refresh(); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); @@ -1948,8 +1919,8 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE createIndex("test1"); indexRandom( true, - client().prepareIndex("test1", "type1", "1").setSource("field", 
"Johnnie Walker Black Label"), - client().prepareIndex("test1", "type1", "2").setSource("field", "trying out OpenSearch") + client().prepareIndex("test1").setId("1").setSource("field", "Johnnie Walker Black Label"), + client().prepareIndex("test1").setId("2").setSource("field", "trying out OpenSearch") ); SearchResponse searchResponse = client().prepareSearch() @@ -1967,7 +1938,7 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE public void testQueryStringParserCache() throws Exception { createIndex("test"); - indexRandom(true, false, client().prepareIndex("test", "type", "1").setSource("nameTokens", "xyz")); + indexRandom(true, false, client().prepareIndex("test").setId("1").setSource("nameTokens", "xyz")); SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -1993,7 +1964,8 @@ public void testQueryStringParserCache() throws Exception { public void testRangeQueryRangeFields_24744() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", "int_range", "type=integer_range")); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().startObject("int_range").field("gte", 10).field("lte", 20).endObject().endObject()) .get(); refresh(); @@ -2003,41 +1975,6 @@ public void testRangeQueryRangeFields_24744() throws Exception { assertHitCount(searchResponse, 1); } - public void testRangeQueryTypeField_31476() throws Exception { - assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); - - client().prepareIndex("test", "foo", "1").setSource("field", "value").get(); - refresh(); - - RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - - range = new RangeQueryBuilder("_type").from("monkey").to("zebra"); - searchResponse = 
client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("donkey"); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - - range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - } - public void testNestedQueryWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() @@ -2097,10 +2034,7 @@ public void testFieldAliasesForMetaFields() throws Exception { .endObject(); assertAcked(prepareCreate("test").addMapping("type", mapping)); - IndexRequestBuilder indexRequest = client().prepareIndex("test", "type") - .setId("1") - .setRouting("custom") - .setSource("field", "value"); + IndexRequestBuilder indexRequest = client().prepareIndex("test").setId("1").setRouting("custom").setSource("field", "value"); indexRandom(true, false, indexRequest); client().admin() .cluster() @@ -2143,7 +2077,7 @@ public void testWildcardQueryNormalizationOnKeywordField() { .build() ).addMapping("_doc", "field1", "type=keyword,normalizer=lowercase_normalizer") ); - client().prepareIndex("test", "_doc", "1").setSource("field1", "Bbb Aaa").get(); + 
client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); { @@ -2170,7 +2104,7 @@ public void testWildcardQueryNormalizationOnTextField() { .build() ).addMapping("_doc", "field1", "type=text,analyzer=lowercase_analyzer") ); - client().prepareIndex("test", "_doc", "1").setSource("field1", "Bbb Aaa").get(); + client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); { @@ -2198,7 +2132,7 @@ public void testWildcardQueryNormalizationKeywordSpecialCharacters() { .build() ).addMapping("_doc", "field", "type=keyword,normalizer=no_wildcard") ); - client().prepareIndex("test", "_doc", "1").setSource("field", "label-1").get(); + client().prepareIndex("test").setId("1").setSource("field", "label-1").get(); refresh(); WildcardQueryBuilder wildCardQuery = wildcardQuery("field", "la*"); @@ -2252,7 +2186,7 @@ public Map> getTokenizers() { */ public void testIssueFuzzyInsideSpanMulti() { createIndex("test"); - client().prepareIndex("test", "_doc", "1").setSource("field", "foobarbaz").get(); + client().prepareIndex("test").setId("1").setSource("field", "foobarbaz").get(); ensureGreen(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index 1e74f9a84e863..c53eda63f155f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -124,12 +124,12 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", 
"quux baz eggplant"), - client().prepareIndex("test", "type1", "5").setSource("body", "quux baz spaghetti"), - client().prepareIndex("test", "type1", "6").setSource("otherbody", "spaghetti") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "quux baz eggplant"), + client().prepareIndex("test").setId("5").setSource("body", "quux baz spaghetti"), + client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar")).get(); @@ -175,10 +175,10 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { indexRandom( true, false, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", "foo baz bar") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "foo baz bar") ); logger.info("--> query 1"); @@ -211,10 +211,10 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { indexRandom( true, false, - client().prepareIndex("test", "type1", "5").setSource("body2", "foo", "other", "foo"), - client().prepareIndex("test", "type1", "6").setSource("body2", "bar", "other", "foo"), - client().prepareIndex("test", "type1", "7").setSource("body2", "foo bar", "other", "foo"), - client().prepareIndex("test", "type1", "8").setSource("body2", "foo baz bar", "other", "foo") + 
client().prepareIndex("test").setId("5").setSource("body2", "foo", "other", "foo"), + client().prepareIndex("test").setId("6").setSource("body2", "bar", "other", "foo"), + client().prepareIndex("test").setId("7").setSource("body2", "foo bar", "other", "foo"), + client().prepareIndex("test").setId("8").setSource("body2", "foo baz bar", "other", "foo") ); logger.info("--> query 5"); @@ -257,14 +257,14 @@ public void testNestedFieldSimpleQueryString() throws IOException { .endObject() ) ); - client().prepareIndex("test", "type1", "1").setSource("body", "foo bar baz").get(); + client().prepareIndex("test").setId("1").setSource("body", "foo bar baz").get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); - searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); + searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); @@ -272,7 +272,7 @@ public void testNestedFieldSimpleQueryString() throws IOException { assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); - searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get(); + searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); } @@ -281,12 +281,12 @@ public void testSimpleQueryStringFlags() throws ExecutionException, InterruptedE createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("body", "foo"), - client().prepareIndex("test", "type1", "2").setSource("body", "bar"), - 
client().prepareIndex("test", "type1", "3").setSource("body", "foo bar"), - client().prepareIndex("test", "type1", "4").setSource("body", "quux baz eggplant"), - client().prepareIndex("test", "type1", "5").setSource("body", "quux baz spaghetti"), - client().prepareIndex("test", "type1", "6").setSource("otherbody", "spaghetti") + client().prepareIndex("test").setId("1").setSource("body", "foo"), + client().prepareIndex("test").setId("2").setSource("body", "bar"), + client().prepareIndex("test").setId("3").setSource("body", "foo bar"), + client().prepareIndex("test").setId("4").setSource("body", "quux baz eggplant"), + client().prepareIndex("test").setId("5").setSource("body", "quux baz spaghetti"), + client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); SearchResponse searchResponse = client().prepareSearch() @@ -339,8 +339,8 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte createIndex("test1", "test2"); indexRandom( true, - client().prepareIndex("test1", "type1", "1").setSource("field", "foo"), - client().prepareIndex("test2", "type1", "10").setSource("field", 5) + client().prepareIndex("test1").setId("1").setSource("field", "foo"), + client().prepareIndex("test2").setId("10").setSource("field", 5) ); refresh(); @@ -362,8 +362,8 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte public void testLenientFlagBeingTooLenient() throws Exception { indexRandom( true, - client().prepareIndex("test", "_doc", "1").setSource("num", 1, "body", "foo bar baz"), - client().prepareIndex("test", "_doc", "2").setSource("num", 2, "body", "eggplant spaghetti lasagna") + client().prepareIndex("test").setId("1").setSource("num", 1, "body", "foo bar baz"), + client().prepareIndex("test").setId("2").setSource("num", 2, "body", "eggplant spaghetti lasagna") ); BoolQueryBuilder q = boolQuery().should(simpleQueryStringQuery("bar").field("num").field("body").lenient(true)); @@ -379,7 +379,6 @@ public 
void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "text") @@ -387,15 +386,11 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); - indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("location", "Köln")); + indexRandom(true, client().prepareIndex("test1").setId("1").setSource("location", "Köln")); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("Köln*").field("location")).get(); @@ -405,8 +400,8 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In } public void testSimpleQueryStringUsesFieldAnalyzer() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", 123, "bar", "abc").get(); - client().prepareIndex("test", "type1", "2").setSource("foo", 234, "bar", "bcd").get(); + client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); + client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); refresh(); @@ -416,8 +411,8 @@ public void testSimpleQueryStringUsesFieldAnalyzer() throws Exception { } public void testSimpleQueryStringOnIndexMetaField() throws Exception { - client().prepareIndex("test", "type1", "1").setSource("foo", 123, "bar", "abc").get(); - client().prepareIndex("test", "type1", "2").setSource("foo", 234, "bar", "bcd").get(); + client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); + 
client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); refresh(); @@ -431,7 +426,6 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -439,15 +433,11 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); - indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("body", "Some Text")); + indexRandom(true, client().prepareIndex("test1").setId("1").setSource("body", "Some Text")); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("the*").field("body")).get(); @@ -461,9 +451,9 @@ public void testBasicAllQuery() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo bar baz")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f2", "Bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo bar baz")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f2", "Bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -485,8 +475,8 @@ public void testWithDate() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); 
- reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02")); - reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); @@ -513,10 +503,10 @@ public void testWithLotsOfTypes() throws Exception { List reqs = new ArrayList<>(); reqs.add( - client().prepareIndex("test", "_doc", "1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") + client().prepareIndex("test").setId("1").setSource("f1", "foo", "f_date", "2015/09/02", "f_float", "1.7", "f_ip", "127.0.0.1") ); reqs.add( - client().prepareIndex("test", "_doc", "2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") + client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2") ); indexRandom(true, false, reqs); @@ -544,7 +534,7 @@ public void testDocWithAllTypes() throws Exception { List reqs = new ArrayList<>(); String docBody = copyToStringFromClasspath("/org/opensearch/search/query/all-example-document.json"); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource(docBody, XContentType.JSON)); + reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -588,9 +578,9 @@ public void testKeywordWithWhitespace() throws Exception { ensureGreen("test"); List reqs = new ArrayList<>(); - reqs.add(client().prepareIndex("test", "_doc", "1").setSource("f2", "Foo Bar")); - 
reqs.add(client().prepareIndex("test", "_doc", "2").setSource("f1", "bar")); - reqs.add(client().prepareIndex("test", "_doc", "3").setSource("f1", "foo bar")); + reqs.add(client().prepareIndex("test").setId("1").setSource("f2", "Foo Bar")); + reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar")); + reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); @@ -632,7 +622,7 @@ public void testLimitOnExpandedFields() throws Exception { ).addMapping("type1", builder) ); - client().prepareIndex("toomanyfields", "type1", "1").setSource("field1", "foo bar baz").get(); + client().prepareIndex("toomanyfields").setId("1").setSource("field1", "foo bar baz").get(); refresh(); doAssertLimitExceededException("*", CLUSTER_MAX_CLAUSE_COUNT + 1); @@ -657,9 +647,9 @@ public void testFieldAlias() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")).get(); @@ -675,9 +665,9 @@ public void testFieldAliasWithWildcardField() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - 
indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); - indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); - indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")).get(); @@ -693,7 +683,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { ensureGreen("test"); List indexRequests = new ArrayList<>(); - indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test").setId("1").setSource("f3", "text", "f2", "one")); indexRandom(true, false, indexRequests); // The wildcard field matches aliases for both a text and boolean field. 
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index 13dc97eb3daf9..3b120dcab22f2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -133,11 +133,13 @@ public void testCustomScriptBinaryField() throws Exception { .addMapping("my-type", createMappingSource("binary")) .setSettings(indexSettings()) ); - client().prepareIndex("my-index", "my-type", "1") + client().prepareIndex("my-index") + .setId("1") .setSource(jsonBuilder().startObject().field("binaryData", Base64.getEncoder().encodeToString(randomBytesDoc1)).endObject()) .get(); flush(); - client().prepareIndex("my-index", "my-type", "2") + client().prepareIndex("my-index") + .setId("2") .setSource(jsonBuilder().startObject().field("binaryData", Base64.getEncoder().encodeToString(randomBytesDoc2)).endObject()) .get(); flush(); @@ -181,15 +183,18 @@ private XContentBuilder createMappingSource(String fieldType) throws IOException public void testCustomScriptBoost() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject()) .get(); flush(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject()) .get(); flush(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject()) .get(); refresh(); @@ -244,7 +249,7 @@ public void testDisallowExpensiveQueries() { 
assertAcked(prepareCreate("test-index").addMapping("_doc", "num1", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { - client().prepareIndex("test-index", "_doc").setId("" + i).setSource("num1", i).get(); + client().prepareIndex("test-index").setId("" + i).setSource("num1", i).get(); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java index 33899a1fb152c..be55193da30cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java @@ -155,7 +155,7 @@ private TestContext create(SearchType... searchTypes) throws Exception { } for (int i = 1; i <= numDocs; i++) { - IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index", "type", String.valueOf(i)); + IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId(String.valueOf(i)); if (missingDocs.contains(i)) { indexRequestBuilder.setSource("x", "y"); } else { @@ -230,7 +230,7 @@ private int createIndex(boolean singleShard) throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource("foo", random().nextBoolean()); + builders[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource("foo", random().nextBoolean()); } indexRandom(true, builders); return numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java index eeb4b9d156517..5c56671384868 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java @@ -106,7 +106,8 @@ public void testSimpleScrollQueryThenFetch() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -161,7 +162,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } else if (i > 60) { routing = "2"; } - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", i).setRouting(routing).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).setRouting(routing).get(); } client().admin().indices().prepareRefresh().get(); @@ -220,7 +221,8 @@ public void testScrollAndUpdateIndex() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 500; i++) { - client().prepareIndex("test", "tweet", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("user", "foobar") @@ -262,7 +264,7 @@ public void testScrollAndUpdateIndex() throws Exception { for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map map = searchHit.getSourceAsMap(); map.put("message", "update"); - client().prepareIndex("test", "tweet", searchHit.getId()).setSource(map).get(); + client().prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); } searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); } while (searchResponse.getHits().getHits().length > 0); @@ -297,7 +299,8 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { 
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -416,7 +419,8 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -490,7 +494,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { * Tests that we use an optimization shrinking the batch to the size of the shard. Thus the Integer.MAX_VALUE window doesn't OOM us. */ public void testDeepScrollingDoesNotBlowUp() throws Exception { - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); /* * Disable the max result window setting for this test because it'll reject the search's unreasonable batch size. We want * unreasonable batch sizes to just OOM. 
@@ -521,7 +525,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { } public void testThatNonExistingScrollIdReturnsCorrectException() throws Exception { - client().prepareIndex("index", "type", "1").setSource("field", "value").execute().get(); + client().prepareIndex("index").setId("1").setSource("field", "value").execute().get(); refresh(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); @@ -539,11 +543,10 @@ public void testStringSortMissingAscTerminates() throws Exception { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ).addMapping("test", "no_field", "type=keyword", "some_field", "type=keyword") ); - client().prepareIndex("test", "test", "1").setSource("some_field", "test").get(); + client().prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); SearchResponse response = client().prepareSearch("test") - .setTypes("test") .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")) .setScroll("1m") .get(); @@ -556,7 +559,6 @@ public void testStringSortMissingAscTerminates() throws Exception { assertNoSearchHits(response); response = client().prepareSearch("test") - .setTypes("test") .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")) .setScroll("1m") .get(); @@ -571,7 +573,7 @@ public void testStringSortMissingAscTerminates() throws Exception { public void testCloseAndReopenOrDeleteWithActiveScroll() { createIndex("test"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", i).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); SearchResponse searchResponse = client().prepareSearch() @@ -662,7 +664,8 @@ public void testScrollInvalidDefaultKeepAlive() throws IOException { public void testInvalidScrollKeepAlive() throws IOException { 
createIndex("test"); for (int i = 0; i < 2; i++) { - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", i).endObject()) .get(); } @@ -717,9 +720,9 @@ public void testScrollRewrittenToMatchNoDocs() { .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards)) .addMapping("_doc", "created_date", "type=date,format=yyyy-MM-dd") ); - client().prepareIndex("test", "_doc").setId("1").setSource("created_date", "2020-01-01").get(); - client().prepareIndex("test", "_doc").setId("2").setSource("created_date", "2020-01-02").get(); - client().prepareIndex("test", "_doc").setId("3").setSource("created_date", "2020-01-03").get(); + client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); + client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); + client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse resp = null; try { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java index b5609d9e51016..a56f8667fab48 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -77,7 +77,7 @@ public void testScanScrollWithShardExceptions() throws Exception { List writes = new ArrayList<>(); for (int i = 0; i < 100; i++) { - writes.add(client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", i).endObject())); + writes.add(client().prepareIndex("test").setSource(jsonBuilder().startObject().field("field", i).endObject())); } 
indexRandom(false, writes); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index 8270496943cdb..b88e56b4f675d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -67,7 +67,7 @@ public void testsShouldFail() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type1", "field1", "type=long", "field2", "type=keyword").get() ); ensureGreen(); - indexRandom(true, client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto")); + indexRandom(true, client().prepareIndex("test").setId("0").setSource("field1", 0, "field2", "toto")); { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, @@ -163,8 +163,8 @@ public void testWithNullStrings() throws InterruptedException { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "type1", "0").setSource("field1", 0), - client().prepareIndex("test", "type1", "1").setSource("field1", 100, "field2", "toto") + client().prepareIndex("test").setId("0").setSource("field1", 0), + client().prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); SearchResponse searchResponse = client().prepareSearch("test") .addSort("field1", SortOrder.ASC) @@ -263,7 +263,7 @@ private void assertSearchFromWithSortValues(String indexName, String typeName, L builder.field("field" + Integer.toString(j), documents.get(i).get(j)); } builder.endObject(); - requests.add(client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)).setSource(builder)); + requests.add(client().prepareIndex(INDEX_NAME).setId(Integer.toString(i)).setSource(builder)); } indexRandom(true, requests); } diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java index 661c5bf563e9f..0652b38228ec5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java @@ -43,6 +43,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.rest.RestStatus; @@ -87,12 +88,12 @@ public void testSearchRandomPreference() throws InterruptedException, ExecutionE createIndex("test"); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("field", "value"), - client().prepareIndex("test", "type", "2").setSource("field", "value"), - client().prepareIndex("test", "type", "3").setSource("field", "value"), - client().prepareIndex("test", "type", "4").setSource("field", "value"), - client().prepareIndex("test", "type", "5").setSource("field", "value"), - client().prepareIndex("test", "type", "6").setSource("field", "value") + client().prepareIndex("test").setId("1").setSource("field", "value"), + client().prepareIndex("test").setId("2").setSource("field", "value"), + client().prepareIndex("test").setId("3").setSource("field", "value"), + client().prepareIndex("test").setId("4").setSource("field", "value"), + client().prepareIndex("test").setId("5").setSource("field", "value"), + client().prepareIndex("test").setId("6").setSource("field", "value") ); int iters = scaledRandomIntBetween(10, 20); @@ -118,11 +119,10 @@ public void testSimpleIp() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( 
XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("from") .field("type", "ip") @@ -136,10 +136,7 @@ public void testSimpleIp() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "1") - .setSource("from", "192.168.0.5", "to", "192.168.0.10") - .setRefreshPolicy(IMMEDIATE) - .get(); + client().prepareIndex("test").setId("1").setSource("from", "192.168.0.5", "to", "192.168.0.10").setRefreshPolicy(IMMEDIATE).get(); SearchResponse search = client().prepareSearch() .setQuery(boolQuery().must(rangeQuery("from").lte("192.168.0.7")).must(rangeQuery("to").gte("192.168.0.7"))) @@ -154,11 +151,10 @@ public void testIpCidr() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("ip") .field("type", "ip") @@ -170,11 +166,11 @@ public void testIpCidr() throws Exception { .get(); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("ip", "192.168.0.1").get(); - client().prepareIndex("test", "type1", "2").setSource("ip", "192.168.0.2").get(); - client().prepareIndex("test", "type1", "3").setSource("ip", "192.168.0.3").get(); - client().prepareIndex("test", "type1", "4").setSource("ip", "192.168.1.4").get(); - client().prepareIndex("test", "type1", "5").setSource("ip", "2001:db8::ff00:42:8329").get(); + client().prepareIndex("test").setId("1").setSource("ip", "192.168.0.1").get(); + client().prepareIndex("test").setId("2").setSource("ip", "192.168.0.2").get(); + client().prepareIndex("test").setId("3").setSource("ip", "192.168.0.3").get(); + client().prepareIndex("test").setId("4").setSource("ip", "192.168.1.4").get(); + client().prepareIndex("test").setId("5").setSource("ip", "2001:db8::ff00:42:8329").get(); refresh(); 
SearchResponse search = client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))).get(); @@ -217,7 +213,7 @@ public void testIpCidr() throws Exception { public void testSimpleId() { createIndex("test"); - client().prepareIndex("test", "type", "XXX1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("XXX1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); // id is not indexed, but lets see that we automatically convert to SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.termQuery("_id", "XXX1")).get(); assertHitCount(searchResponse, 1L); @@ -228,8 +224,8 @@ public void testSimpleId() { public void testSimpleDateRange() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field", "2010-01-05T02:00").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "2010-01-06T02:00").get(); + client().prepareIndex("test").setId("1").setSource("field", "2010-01-05T02:00").get(); + client().prepareIndex("test").setId("2").setSource("field", "2010-01-06T02:00").get(); ensureGreen(); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -270,7 +266,7 @@ public void testSimpleTerminateAfterCount() throws Exception { for (int i = 1; i <= max; i++) { String id = String.valueOf(i); - docbuilders.add(client().prepareIndex("test", "type1", id).setSource("field", i)); + docbuilders.add(client().prepareIndex("test").setId(id).setSource("field", i)); } indexRandom(true, docbuilders); @@ -299,14 +295,14 @@ public void testSimpleTerminateAfterCount() throws Exception { public void testSimpleIndexSortEarlyTerminate() throws Exception { prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).put("index.sort.field", "rank") - ).addMapping("type1", "rank", "type=integer").get(); + 
).addMapping(MapperService.SINGLE_MAPPING_NAME, "rank", "type=integer").get(); ensureGreen(); int max = randomIntBetween(3, 29); List docbuilders = new ArrayList<>(max); for (int i = max - 1; i >= 0; i--) { String id = String.valueOf(i); - docbuilders.add(client().prepareIndex("test", "type1", id).setSource("rank", i)); + docbuilders.add(client().prepareIndex("test").setId(id).setSource("rank", i)); } indexRandom(true, docbuilders); @@ -330,7 +326,7 @@ public void testSimpleIndexSortEarlyTerminate() throws Exception { public void testInsaneFromAndSize() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertWindowFails(client().prepareSearch("idx").setFrom(Integer.MAX_VALUE)); assertWindowFails(client().prepareSearch("idx").setSize(Integer.MAX_VALUE)); @@ -338,7 +334,7 @@ public void testInsaneFromAndSize() throws Exception { public void testTooLargeFromAndSize() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertWindowFails(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); @@ -351,7 +347,7 @@ public void testTooLargeFromAndSize() throws Exception { public void testLargeFromAndSizeSucceeds() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10).get(), 1); 
assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); @@ -369,7 +365,7 @@ public void testTooLargeFromAndSizeOkBySetting() throws Exception { Settings.builder() .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2) ).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); @@ -397,7 +393,7 @@ public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { ) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); @@ -412,7 +408,7 @@ public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception { prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); 
assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); @@ -427,7 +423,7 @@ public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws public void testTooLargeRescoreWindow() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertRescoreWindowFails(Integer.MAX_VALUE); assertRescoreWindowFails(IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY) + 1); @@ -437,7 +433,7 @@ public void testTooLargeRescoreOkBySetting() throws Exception { int defaultMaxWindow = IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY); prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2)) .get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -454,7 +450,7 @@ public void testTooLargeRescoreOkByResultWindowSetting() throws Exception { defaultMaxWindow * 2 ) ).get(); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -472,7 +468,7 @@ public void testTooLargeRescoreOkByDynamicSetting() throws Exception { .setSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2)) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", 
XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -493,7 +489,7 @@ public void testTooLargeRescoreOkByDynamicResultWindowSetting() throws Exception ) .get() ); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); assertHitCount( client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(), @@ -517,7 +513,7 @@ public void testTermQueryBigInt() throws Exception { prepareCreate("idx").addMapping("type", "field", "type=keyword").get(); ensureGreen("idx"); - client().prepareIndex("idx", "type") + client().prepareIndex("idx") .setId("1") .setSource("{\"field\" : 80315953321748200608 }", XContentType.JSON) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) @@ -533,7 +529,7 @@ public void testTermQueryBigInt() throws Exception { public void testTooLongRegexInRegexpQuery() throws Exception { createIndex("idx"); - indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON)); + indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); int defaultMaxRegexLength = IndexSettings.MAX_REGEX_LENGTH_SETTING.get(Settings.EMPTY); StringBuilder regexp = new StringBuilder(defaultMaxRegexLength); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index be5506291a2c1..9c735c42052e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -43,7 +43,6 @@ import org.opensearch.common.unit.TimeValue; 
import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.Scroll; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHit; @@ -67,7 +66,6 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("invalid_random_kw") .field("type", "keyword") @@ -83,14 +81,13 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe .endObject() .endObject() .endObject() - .endObject() ); assertAcked( client().admin() .indices() .prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) ); ensureGreen(); @@ -102,7 +99,7 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe .field("static_int", 0) .field("invalid_random_int", randomInt()) .endObject(); - requests.add(client().prepareIndex("test", "type").setSource(builder)); + requests.add(client().prepareIndex("test").setSource(builder)); } indexRandom(true, requests); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 643a7875c0295..92dfedeb99a23 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -142,7 +142,7 @@ public void testIssue8226() { assertAcked(prepareCreate("test_" + i).addAlias(new Alias("test"))); } if (i > 0) { - client().prepareIndex("test_" + i, "foo", "" + i).setSource("{\"entry\": " + i + "}", XContentType.JSON).get(); + 
client().prepareIndex("test_" + i).setId("" + i).setSource("{\"entry\": " + i + "}", XContentType.JSON).get(); } } refresh(); @@ -188,7 +188,7 @@ public void testIssue6614() throws ExecutionException, InterruptedException { final int numDocs = randomIntBetween(1, 23); // hour of the day for (int j = 0; j < numDocs; j++) { builders.add( - client().prepareIndex(indexId, "type") + client().prepareIndex(indexId) .setSource( "foo", "bar", @@ -312,7 +312,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut sparseBytes.put(ref, docId); } src.endObject(); - builders[i] = client().prepareIndex("test", "type", docId).setSource(src); + builders[i] = client().prepareIndex("test").setId(docId).setSource(src); } indexRandom(true, builders); { @@ -361,7 +361,7 @@ public void test3078() { ensureGreen(); for (int i = 1; i < 101; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", Integer.toString(i)).get(); + client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", Integer.toString(i)).get(); } refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -373,7 +373,7 @@ public void test3078() { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); // reindex and refresh - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); refresh(); searchResponse = client().prepareSearch("test") @@ -385,7 +385,7 @@ public void test3078() { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); // reindex - no refresh - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); searchResponse = 
client().prepareSearch("test") .setQuery(matchAllQuery()) @@ -399,7 +399,7 @@ public void test3078() { forceMerge(); refresh(); - client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).get(); + client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); searchResponse = client().prepareSearch("test") .setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) @@ -422,9 +422,9 @@ public void testScoreSortDirection() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -460,9 +460,9 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -497,9 +497,9 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { public void testIssue2986() { assertAcked(client().admin().indices().prepareCreate("test").addMapping("post", "field1", "type=keyword").get()); - client().prepareIndex("test", "post", "1").setSource("{\"field1\":\"value1\"}", XContentType.JSON).get(); - client().prepareIndex("test", 
"post", "2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); - client().prepareIndex("test", "post", "3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"field1\":\"value1\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get(); refresh(); SearchResponse result = client().prepareSearch("test") .setQuery(matchAllQuery()) @@ -521,16 +521,16 @@ public void testIssue2991() { } assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "tag", "type=keyword").get()); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("tag", "alpha").get(); + client().prepareIndex("test").setId("1").setSource("tag", "alpha").get(); refresh(); - client().prepareIndex("test", "type", "3").setSource("tag", "gamma").get(); + client().prepareIndex("test").setId("3").setSource("tag", "gamma").get(); refresh(); - client().prepareIndex("test", "type", "4").setSource("tag", "delta").get(); + client().prepareIndex("test").setId("4").setSource("tag", "delta").get(); refresh(); - client().prepareIndex("test", "type", "2").setSource("tag", "beta").get(); + client().prepareIndex("test").setId("2").setSource("tag", "beta").get(); refresh(); SearchResponse resp = client().prepareSearch("test") @@ -596,7 +596,8 @@ public void testSimpleSorts() throws Exception { ensureGreen(); List builders = new ArrayList<>(); for (int i = 0; i < 10; i++) { - IndexRequestBuilder builder = client().prepareIndex("test", "type1", Integer.toString(i)) + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) @@ -818,13 +819,15 @@ public void testSortMissingNumbers() 
throws Exception { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", -1).field("d_value", -1.1).endObject()) .get(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", 2).field("d_value", 2.2).endObject()) .get(); @@ -885,13 +888,15 @@ public void testSortMissingStrings() throws IOException { ) ); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("value", "a").endObject()) .get(); - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); + client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("id", "2").endObject()).get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource(jsonBuilder().startObject().field("id", "1").field("value", "c").endObject()) .get(); @@ -957,7 +962,8 @@ public void testSortMissingStrings() throws IOException { public void testIgnoreUnmapped() throws Exception { createIndex("test"); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("id", "1").field("i_value", -1).field("d_value", -1.1).endObject()) .get(); @@ -1037,7 +1043,8 @@ public void testSortMVField() throws Exception { ); ensureGreen(); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) 
.setSource( jsonBuilder().startObject() .array("long_values", 1L, 5L, 10L, 8L) @@ -1050,7 +1057,8 @@ public void testSortMVField() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", Integer.toString(2)) + client().prepareIndex("test") + .setId(Integer.toString(2)) .setSource( jsonBuilder().startObject() .array("long_values", 11L, 15L, 20L, 7L) @@ -1063,7 +1071,8 @@ public void testSortMVField() throws Exception { .endObject() ) .get(); - client().prepareIndex("test", "type1", Integer.toString(3)) + client().prepareIndex("test") + .setId(Integer.toString(3)) .setSource( jsonBuilder().startObject() .array("long_values", 2L, 1L, 3L, -4L) @@ -1351,7 +1360,8 @@ public void testSortOnRareField() throws IOException { ) ); ensureGreen(); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) .setSource(jsonBuilder().startObject().array("string_values", "01", "05", "10", "08").endObject()) .get(); @@ -1367,11 +1377,13 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(Integer.toString(1))); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo("10")); - client().prepareIndex("test", "type1", Integer.toString(2)) + client().prepareIndex("test") + .setId(Integer.toString(2)) .setSource(jsonBuilder().startObject().array("string_values", "11", "15", "20", "07").endObject()) .get(); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); } @@ -1387,11 +1399,13 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo("10")); - 
client().prepareIndex("test", "type1", Integer.toString(3)) + client().prepareIndex("test") + .setId(Integer.toString(3)) .setSource(jsonBuilder().startObject().array("string_values", "02", "01", "03", "!4").endObject()) .get(); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); } @@ -1411,7 +1425,8 @@ public void testSortOnRareField() throws IOException { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("03")); for (int i = 0; i < 15; i++) { - client().prepareIndex("test", "type1", Integer.toString(300 + i)) + client().prepareIndex("test") + .setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); refresh(); @@ -1443,7 +1458,7 @@ public void testSortMetaField() throws Exception { final int numDocs = randomIntBetween(10, 20); IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setSource(); + indexReqs[i] = client().prepareIndex("test").setId(Integer.toString(i)).setSource(); } indexRandom(true, indexReqs); @@ -1520,7 +1535,8 @@ public void testNestedSort() throws IOException, InterruptedException, Execution ); ensureGreen(); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startArray("nested") @@ -1534,7 +1550,8 @@ public void testNestedSort() throws IOException, InterruptedException, Execution .endObject() ) .get(); - client().prepareIndex("test", "type", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .startArray("nested") @@ -1625,7 +1642,7 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() 
throws Exception for (String index : new String[] { "test1", "test2" }) { List docs = new ArrayList<>(); for (int i = 0; i < 256; i++) { - docs.add(client().prepareIndex(index, "type", Integer.toString(i)).setSource(sortField, i)); + docs.add(client().prepareIndex(index).setId(Integer.toString(i)).setSource(sortField, i)); } indexRandom(true, docs); } @@ -1657,8 +1674,8 @@ public void testCustomFormat() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "ip", "type=ip")); indexRandom( true, - client().prepareIndex("test", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("test", "type", "2").setSource("ip", "2001:db8::ff00:42:8329") + client().prepareIndex("test").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("test").setId("2").setSource("ip", "2001:db8::ff00:42:8329") ); SearchResponse response = client().prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).get(); @@ -1681,7 +1698,7 @@ public void testScriptFieldSort() throws Exception { IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; List keywords = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "t").setSource("number", i, "keyword", Integer.toString(i)); + indexReqs[i] = client().prepareIndex("test").setSource("number", i, "keyword", Integer.toString(i)); keywords.add(Integer.toString(i)); } Collections.sort(keywords); @@ -1732,9 +1749,9 @@ public void testFieldAlias() throws Exception { ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + 
builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() @@ -1760,9 +1777,9 @@ public void testFieldAliasesWithMissingValues() throws Exception { ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); - builders.add(client().prepareIndex("old_index", "_doc").setSource(Collections.emptyMap())); - builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() @@ -1785,9 +1802,9 @@ public void testCastNumericType() throws Exception { ensureGreen("index_double", "index_long", "index_float"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("index_double", "_doc").setSource("field", 12.6)); - builders.add(client().prepareIndex("index_long", "_doc").setSource("field", 12)); - builders.add(client().prepareIndex("index_float", "_doc").setSource("field", 12.1)); + builders.add(client().prepareIndex("index_double").setSource("field", 12.6)); + builders.add(client().prepareIndex("index_long").setSource("field", 12)); + builders.add(client().prepareIndex("index_float").setSource("field", 12.1)); indexRandom(true, true, builders); { @@ -1830,8 +1847,8 @@ public void testCastDate() throws Exception { ensureGreen("index_date", "index_date_nanos"); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "2024-04-11T23:47:17")); - 
builders.add(client().prepareIndex("index_date_nanos", "_doc").setSource("field", "2024-04-11T23:47:16.854775807Z")); + builders.add(client().prepareIndex("index_date").setSource("field", "2024-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date_nanos").setSource("field", "2024-04-11T23:47:16.854775807Z")); indexRandom(true, true, builders); { @@ -1913,7 +1930,7 @@ public void testCastDate() throws Exception { { builders.clear(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "1905-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date").setSource("field", "1905-04-11T23:47:17")); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) @@ -1927,7 +1944,7 @@ public void testCastDate() throws Exception { { builders.clear(); - builders.add(client().prepareIndex("index_date", "_doc").setSource("field", "2346-04-11T23:47:17")); + builders.add(client().prepareIndex("index_date").setSource("field", "2346-04-11T23:47:17")); indexRandom(true, true, builders); SearchResponse response = client().prepareSearch() .setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01")) @@ -1972,7 +1989,7 @@ public void testLongSortOptimizationCorrectResults() { bulkBuilder = client().prepareBulk(); } String source = "{\"long_field\":" + randomLong() + "}"; - bulkBuilder.add(client().prepareIndex("test1", "_doc").setId(Integer.toString(i)).setSource(source, XContentType.JSON)); + bulkBuilder.add(client().prepareIndex("test1").setId(Integer.toString(i)).setSource(source, XContentType.JSON)); } refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java index 3774f4e7c7f4c..1739add2ff5e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java @@ -80,7 +80,8 @@ public void testDistanceSortingMVFields() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("names", "New York") @@ -92,7 +93,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "2") + client().prepareIndex("test") + .setId("2") .setSource( jsonBuilder().startObject() .field("names", "New York 2") @@ -104,7 +106,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "3") + client().prepareIndex("test") + .setId("3") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -124,7 +127,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "4") + client().prepareIndex("test") + .setId("4") .setSource( jsonBuilder().startObject() .array("names", "Wall Street", "Soho") @@ -144,7 +148,8 @@ public void testDistanceSortingMVFields() throws Exception { ) .get(); - client().prepareIndex("test", "type1", "5") + client().prepareIndex("test") + .setId("5") .setSource( jsonBuilder().startObject() .array("names", "Greenwich Village", "Brooklyn") @@ -271,7 +276,8 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -291,7 +297,8 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { ) .get(); - client().prepareIndex("test", 
"type1", "2") + client().prepareIndex("test") + .setId("2") .setSource(jsonBuilder().startObject().array("names", "Wall Street", "Soho").endObject()) .get(); @@ -346,7 +353,8 @@ public void testDistanceSortingNestedFields() throws Exception { indexRandom( true, - client().prepareIndex("companies", "company", "1") + client().prepareIndex("companies") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "company 1") @@ -361,7 +369,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "2") + client().prepareIndex("companies") + .setId("2") .setSource( jsonBuilder().startObject() .field("name", "company 2") @@ -385,7 +394,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "3") + client().prepareIndex("companies") + .setId("3") .setSource( jsonBuilder().startObject() .field("name", "company 3") @@ -408,7 +418,8 @@ public void testDistanceSortingNestedFields() throws Exception { .endArray() .endObject() ), - client().prepareIndex("companies", "company", "4") + client().prepareIndex("companies") + .setId("4") .setSource( jsonBuilder().startObject() .field("name", "company 4") @@ -588,9 +599,9 @@ public void testGeoDistanceFilter() throws IOException { XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", Geohash.stringEncode(lon, lat)).endObject(); assertAcked(prepareCreate("locations").setSettings(settings).addMapping("location", mapping)); - client().prepareIndex("locations", "location", "1").setCreate(true).setSource(source).get(); + client().prepareIndex("locations").setId("1").setCreate(true).setSource(source).get(); refresh(); - client().prepareGet("locations", "location", "1").get(); + client().prepareGet("locations", "1").get(); SearchResponse result = client().prepareSearch("locations") .setQuery(QueryBuilders.matchAllQuery()) @@ -612,7 
+623,8 @@ public void testDistanceSortingWithUnmappedField() throws Exception { assertAcked(prepareCreate("test2")); ensureGreen(); - client().prepareIndex("test1", "type1", "1") + client().prepareIndex("test1") + .setId("1") .setSource( jsonBuilder().startObject() .array("names", "Times Square", "Tribeca") @@ -632,7 +644,8 @@ public void testDistanceSortingWithUnmappedField() throws Exception { ) .get(); - client().prepareIndex("test2", "type1", "2") + client().prepareIndex("test2") + .setId("2") .setSource(jsonBuilder().startObject().array("names", "Wall Street", "Soho").endObject()) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java index 74204950a11c9..c283444666f0b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java @@ -96,8 +96,8 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce logger.info("d2: {}", d2Builder); indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); GeoPoint[] q = new GeoPoint[2]; if (randomBoolean()) { @@ -187,8 +187,8 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc logger.info("d2: {}", d2Builder); indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); GeoPoint q = new GeoPoint(0, 0); @@ -259,8 +259,8 
@@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept indexRandom( true, - client().prepareIndex("index", "type", "d1").setSource(d1Builder), - client().prepareIndex("index", "type", "d2").setSource(d2Builder) + client().prepareIndex("index").setId("d1").setSource(d1Builder), + client().prepareIndex("index").setId("d2").setSource(d2Builder) ); List qPoints = Arrays.asList(new GeoPoint(2, 1), new GeoPoint(2, 2), new GeoPoint(2, 3), new GeoPoint(2, 4)); @@ -309,9 +309,11 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup assertAcked(prepareCreate("index").addMapping("type", LOCATION_FIELD, "type=geo_point")); indexRandom( true, - client().prepareIndex("index", "type", "d1") + client().prepareIndex("index") + .setId("d1") .setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 1).endObject().endObject()), - client().prepareIndex("index", "type", "d2") + client().prepareIndex("index") + .setId("d2") .setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 2).endObject().endObject()) ); @@ -387,8 +389,8 @@ public void testCrossIndexIgnoreUnmapped() throws Exception { indexRandom( true, - client().prepareIndex("test1", "type").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65), - client().prepareIndex("test2", "type").setSource() + client().prepareIndex("test1").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65), + client().prepareIndex("test2").setSource() ); SearchResponse resp = client().prepareSearch("test1", "test2") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index 62271cb023fde..70bb24532aa7d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -38,7 +38,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -173,7 +172,8 @@ public void testSimpleSorts() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < 10; i++) { builders.add( - client().prepareIndex("test", "type1", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource( jsonBuilder().startObject() .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) @@ -242,7 +242,6 @@ public void testSimpleSorts() throws Exception { public void testSortMinValueScript() throws IOException { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("lvalue") .field("type", "long") @@ -258,14 +257,14 @@ public void testSortMinValueScript() throws IOException { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "type1", "" + i) + client().prepareIndex("test") + .setId("" + i) .setSource( jsonBuilder().startObject() .field("ord", i) @@ -282,7 +281,7 @@ public void testSortMinValueScript() throws IOException { } for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields - client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject().field("ord", i).endObject()).get(); + client().prepareIndex("test").setId("" + i).setSource(jsonBuilder().startObject().field("ord", i).endObject()).get(); } 
client().admin().indices().prepareRefresh("test").get(); @@ -357,7 +356,6 @@ public void testDocumentsWithNullValue() throws Exception { // be propagated to all nodes yet and sort operation fail when the sort field is not defined String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("id") .field("type", "keyword") @@ -367,22 +365,15 @@ public void testDocumentsWithNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()).get(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "2").nullField("svalue").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "2").nullField("svalue").endObject()).get(); - client().prepareIndex("test", "type1") - .setSource(jsonBuilder().startObject().field("id", "3").field("svalue", "bbb").endObject()) - .get(); + client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "3").field("svalue", "bbb").endObject()).get(); flush(); refresh(); @@ -470,7 +461,8 @@ public void test2920() throws IOException { ); ensureGreen(); for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) + client().prepareIndex("test") + .setId(Integer.toString(i)) .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java index b36168f2a110f..e9fc1c54ad234 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java @@ -33,9 +33,9 @@ public void testPluginSort() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); @@ -54,9 +54,9 @@ public void testPluginSortXContent() throws Exception { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("field", 2).get(); - client().prepareIndex("test", "type", "2").setSource("field", 1).get(); - client().prepareIndex("test", "type", "3").setSource("field", 0).get(); + client().prepareIndex("test").setId("1").setSource("field", 2).get(); + client().prepareIndex("test").setId("2").setSource("field", 1).get(); + client().prepareIndex("test").setId("3").setSource("field", 0).get(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java index d2ca32173a978..758d749f0be8e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java @@ -55,25 +55,23 @@ public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); - client().prepareIndex("test", "_doc", 
"1").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); response = client().prepareSearch("test").storedFields("_none_").get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } public void testInnerHits() { assertAcked(prepareCreate("test").addMapping("_doc", "nested", "type=nested")); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); + client().prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); SearchResponse response = client().prepareSearch("test") @@ -88,13 +86,11 @@ public void testInnerHits() { .get(); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); assertThat(hits.getTotalHits().value, equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); - assertThat(hits.getAt(0).getType(), equalTo("_doc")); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } @@ -102,18 +98,16 @@ public void 
testWithRouting() { assertAcked(prepareCreate("test")); ensureGreen(); - client().prepareIndex("test", "_doc", "1").setSource("field", "value").setRouting("toto").get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); response = client().prepareSearch("test").storedFields("_none_").get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java index 266cccc08ef18..11223d11ff30d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java @@ -62,7 +62,7 @@ public void testSourceFiltering() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field1", "value", "field2", "value2").get(); + client().prepareIndex("test").setId("1").setSource("field1", "value", "field2", "value2").get(); refresh(); SearchResponse response = client().prepareSearch("test").setFetchSource(false).get(); @@ -95,7 +95,7 @@ public void testSourceWithWildcardFiltering() { createIndex("test"); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource("field", "value").get(); + client().prepareIndex("test").setId("1").setSource("field", 
"value").get(); refresh(); SearchResponse response = client().prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java index 66c56f654e34f..c72b5d40553b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java @@ -111,7 +111,7 @@ public void testSimpleStats() throws Exception { ); int docsTest1 = scaledRandomIntBetween(3 * shardsIdx1, 5 * shardsIdx1); for (int i = 0; i < docsTest1; i++) { - client().prepareIndex("test1", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test1").setId(Integer.toString(i)).setSource("field", "value").get(); if (rarely()) { refresh(); } @@ -123,7 +123,7 @@ public void testSimpleStats() throws Exception { ); int docsTest2 = scaledRandomIntBetween(3 * shardsIdx2, 5 * shardsIdx2); for (int i = 0; i < docsTest2; i++) { - client().prepareIndex("test2", "type", Integer.toString(i)).setSource("field", "value").get(); + client().prepareIndex("test2").setId(Integer.toString(i)).setSource("field", "value").get(); if (rarely()) { refresh(); } @@ -207,7 +207,8 @@ public void testOpenContexts() { final int docs = scaledRandomIntBetween(20, 50); for (int s = 0; s < numAssignedShards(index); s++) { for (int i = 0; i < docs; i++) { - client().prepareIndex(index, "type", Integer.toString(s * docs + i)) + client().prepareIndex(index) + .setId(Integer.toString(s * docs + i)) .setSource("field", "value") .setRouting(Integer.toString(s)) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index 
d7a79e473b64f..099ffbc278f81 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -49,6 +49,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.plugins.Plugin; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -96,7 +97,6 @@ @SuppressCodecs("*") // requires custom completion format public class CompletionSuggestSearchIT extends OpenSearchIntegTestCase { private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); @@ -117,7 +117,8 @@ public void testTieBreak() throws Exception { String value = "a" + randomAlphaOfLengthBetween(1, 10); entries[i] = value; indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject().startObject(FIELD).field("input", value).field("weight", 10).endObject().endObject() ) @@ -139,7 +140,8 @@ public void testPrefix() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -166,7 +168,8 @@ public void testTextAndGlobalText() throws 
Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -202,7 +205,8 @@ public void testRegex() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -225,7 +229,8 @@ public void testFuzzy() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -248,7 +253,8 @@ public void testEarlyTermination() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -282,7 +288,8 @@ public void testSuggestDocument() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -317,7 +324,8 @@ public void testSuggestDocumentNoSource() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -355,7 +363,8 @@ public void 
testSuggestDocumentSourceFiltering() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -419,7 +428,8 @@ public void testThatWeightsAreWorking() throws Exception { List similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the"); // the weight is 1000 divided by string length, so the results are easy to to check for (String similarName : similarNames) { - client().prepareIndex(INDEX, TYPE, similarName) + client().prepareIndex(INDEX) + .setId(similarName) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -443,7 +453,8 @@ public void testThatWeightMustBeAnInteger() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -462,7 +473,8 @@ public void testThatWeightMustBeAnInteger() throws Exception { public void testThatWeightCanBeAString() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -500,7 +512,8 @@ public void testThatWeightMustNotBeANonNumberString() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -523,7 +536,8 @@ public void testThatWeightAsStringMustBeInt() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") 
.setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -542,7 +556,8 @@ public void testThatWeightAsStringMustBeInt() throws Exception { public void testThatInputCanBeAStringInsteadOfAnArray() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().startObject(FIELD).field("input", "Foo Fighters").endObject().endObject()) .get(); @@ -555,7 +570,8 @@ public void testDisabledPreserveSeparators() throws Exception { completionMappingBuilder.preserveSeparators(false); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -568,7 +584,8 @@ public void testDisabledPreserveSeparators() throws Exception { ) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -590,13 +607,15 @@ public void testEnabledPreserveSeparators() throws Exception { completionMappingBuilder.preserveSeparators(true); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foo Fighters").endArray().endObject().endObject() ) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foof").endArray().endObject().endObject()) .get(); @@ -608,7 +627,8 @@ public void testEnabledPreserveSeparators() throws Exception { public void testThatMultipleInputsAreSupported() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( 
jsonBuilder().startObject() .startObject(FIELD) @@ -630,7 +650,8 @@ public void testThatMultipleInputsAreSupported() throws Exception { public void testThatShortSyntaxIsWorking() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startArray(FIELD).value("The Prodigy Firestarter").value("Firestarter").endArray().endObject() ) @@ -647,7 +668,8 @@ public void testThatDisablingPositionIncrementsWorkForStopwords() throws Excepti completionMappingBuilder.searchAnalyzer("classic").indexAnalyzer("classic").preservePositionIncrements(false); createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("The Beatles").endArray().endObject().endObject() ) @@ -660,7 +682,7 @@ public void testThatDisablingPositionIncrementsWorkForStopwords() throws Excepti public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "text") @@ -668,8 +690,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1") + assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + client().prepareIndex(INDEX) + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()) .get(); @@ -678,10 +701,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping(INDEX) - 
.setType(TYPE) .setSource( jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "text") @@ -706,7 +728,8 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .get(); assertSuggestions(searchResponse, "suggs"); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setRefreshPolicy(IMMEDIATE) .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()) .get(); @@ -723,7 +746,8 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { public void testThatFuzzySuggesterWorks() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -750,7 +774,8 @@ public void testThatFuzzySuggesterWorks() throws Exception { public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -784,7 +809,8 @@ public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -818,7 +844,8 @@ public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { public void testThatFuzzySuggesterSupportsMinPrefixLength() throws 
Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -854,7 +881,8 @@ public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -890,7 +918,8 @@ public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("ööööö").endArray().endObject().endObject()) .get(); @@ -937,10 +966,9 @@ public void testThatStatsAreWorking() throws Exception { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping(INDEX) - .setType(TYPE) .setSource( jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -958,10 +986,12 @@ public void testThatStatsAreWorking() throws Exception { assertThat(putMappingResponse.isAcknowledged(), is(true)); // Index two entities - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").field(otherField, "WHATEVER").endObject()) .get(); - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + 
.setId("2") .setSource(jsonBuilder().startObject().field(FIELD, "Bar Fighters").field(otherField, "WHATEVER2").endObject()) .get(); @@ -1021,7 +1051,8 @@ public void testThatStatsAreWorking() throws Exception { public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exception { createIndexAndMapping(completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject() ) @@ -1031,7 +1062,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch(INDEX).setTypes(TYPE).addSort(new FieldSortBuilder(FIELD)).get() + () -> client().prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD)).get() ); assertThat(e.status().getStatus(), is(400)); assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]")); @@ -1050,7 +1081,8 @@ public void testThatSuggestStopFilterWorks() throws Exception { completionMappingBuilder.indexAnalyzer("simple"); createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1064,7 +1096,8 @@ public void testThatSuggestStopFilterWorks() throws Exception { .get(); // Higher weight so it's ranked first: - client().prepareIndex(INDEX, TYPE, "2") + client().prepareIndex(INDEX) + .setId("2") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1096,7 +1129,8 @@ public void testThatIndexingInvalidFieldsInCompletionFieldResultsInException() t createIndexAndMapping(completionMappingBuilder); try { - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") 
.setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1128,7 +1162,7 @@ public void testSkipDuplicates() throws Exception { weights[id] = Math.max(weight, weights[id]); String suggestion = "suggestion-" + String.format(Locale.ENGLISH, "%03d", id); indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE) + client().prepareIndex(INDEX) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1252,7 +1286,6 @@ private static List getNames(Suggest.Suggestion.Entry client().prepareIndex(INDEX, TYPE, "1") + () -> client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1414,9 +1450,9 @@ public void testIssue5930() throws IOException { .indices() .prepareCreate(INDEX) .addMapping( - TYPE, + MapperService.SINGLE_MAPPING_NAME, jsonBuilder().startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -1428,7 +1464,8 @@ public void testIssue5930() throws IOException { .get() ); String string = "foo bar"; - client().prepareIndex(INDEX, TYPE, "1") + client().prepareIndex(INDEX) + .setId("1") .setSource(jsonBuilder().startObject().field(FIELD, string).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -1451,7 +1488,8 @@ public void testMultiDocSuggestions() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -1470,7 +1508,7 @@ public void testMultiDocSuggestions() throws Exception { public void testSuggestWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(TYPE) + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -1482,12 +1520,12 
@@ public void testSuggestWithFieldAlias() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); + assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); List builders = new ArrayList<>(); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "apple")); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "mango")); - builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "papaya")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "apple")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "mango")); + builders.add(client().prepareIndex(INDEX).setSource(FIELD, "papaya")); indexRandom(true, false, builders); CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("alias").text("app"); @@ -1501,7 +1539,8 @@ public void testSuggestOnlyExplain() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 1; i <= numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java index 01c867f487a3f..c9b14993d6e49 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -42,6 +42,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.RestStatus; import 
org.opensearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder; import org.opensearch.search.suggest.completion.CompletionSuggestionBuilder; @@ -72,7 +73,6 @@ public class ContextCompletionSuggestSearchIT extends OpenSearchIntegTestCase { private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT); @Override @@ -102,7 +102,7 @@ public void testContextPrefix() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -138,7 +138,7 @@ public void testContextRegex() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -174,7 +174,7 @@ public void testContextFuzzy() throws Exception { source.field("type", "type" + i % 3); } source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -193,7 +193,8 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception { LinkedHashMap> map = 
new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); - IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1") + IndexResponse indexResponse = client().prepareIndex(INDEX) + .setId("1") .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -222,7 +223,8 @@ public void testSingleContextFiltering() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -253,7 +255,8 @@ public void testSingleContextBoosting() throws Exception { List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { indexRequestBuilders.add( - client().prepareIndex(INDEX, TYPE, "" + i) + client().prepareIndex(INDEX) + .setId("" + i) .setSource( jsonBuilder().startObject() .startObject(FIELD) @@ -297,7 +300,7 @@ public void testMultiContextFiltering() throws Exception { .field("cat", "cat" + i % 2) .field("type", "type" + i % 4) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -339,7 +342,7 @@ public void testMultiContextBoosting() throws Exception { .field("cat", "cat" + i % 2) .field("type", "type" + i % 4) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -412,7 +415,7 @@ public void testSeveralContexts() throws Exception { source.field("type" + c, "type" + c + i % 4); } source.endObject(); - 
indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -445,7 +448,7 @@ public void testGeoFiltering() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -479,7 +482,7 @@ public void testGeoBoosting() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -512,7 +515,7 @@ public void testGeoPointContext() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD) @@ -554,7 +557,7 @@ public void testGeoNeighbours() throws Exception { .endObject() .endObject() .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } indexRandom(true, indexRequestBuilders); @@ -573,7 +576,6 @@ public void testGeoNeighbours() throws Exception { public void testGeoField() throws Exception { XContentBuilder mapping = jsonBuilder(); mapping.startObject(); - mapping.startObject(TYPE); mapping.startObject("properties"); mapping.startObject("location"); mapping.startObject("properties"); @@ -605,9 +607,8 @@ public void testGeoField() 
throws Exception { mapping.endObject(); mapping.endObject(); mapping.endObject(); - mapping.endObject(); - assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping)); + assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); XContentBuilder source1 = jsonBuilder().startObject() .startObject("location") @@ -617,7 +618,7 @@ public void testGeoField() throws Exception { .array("input", "Hotel Amsterdam in Berlin") .endObject() .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).get(); + client().prepareIndex(INDEX).setId("1").setSource(source1).get(); XContentBuilder source2 = jsonBuilder().startObject() .startObject("location") @@ -627,7 +628,7 @@ public void testGeoField() throws Exception { .array("input", "Hotel Berlin in Amsterdam") .endObject() .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).get(); + client().prepareIndex(INDEX).setId("2").setSource(source2).get(); refresh(); @@ -671,7 +672,7 @@ public void testSkipDuplicatesWithContexts() throws Exception { .field("cat", "cat" + id % 2) .field("type", "type" + id) .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(source)); + indexRequestBuilders.add(client().prepareIndex(INDEX).setId("" + i).setSource(source)); } String[] expected = new String[numUnique]; for (int i = 0; i < numUnique; i++) { @@ -705,7 +706,6 @@ private void createIndexAndMapping(CompletionMappingBuilder completionMappingBui private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException { XContentBuilder mapping = jsonBuilder().startObject() - .startObject(TYPE) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -747,14 +747,14 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi for (String fieldName : categoryContextFields) { mapping.startObject(fieldName).field("type", randomBoolean() ? 
"keyword" : "text").endObject(); } - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); assertAcked( client().admin() .indices() .prepareCreate(INDEX) .setSettings(Settings.builder().put(indexSettings()).put(settings)) - .addMapping(TYPE, mapping) + .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping) .get() ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index 4e116a7be140d..bb6e1643dd767 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -320,9 +320,9 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut indexRandom( true, - client().prepareIndex("test", "type1").setSource("name", "I like iced tea"), - client().prepareIndex("test", "type1").setSource("name", "I like tea."), - client().prepareIndex("test", "type1").setSource("name", "I like ice cream.") + client().prepareIndex("test").setSource("name", "I like iced tea"), + client().prepareIndex("test").setSource("name", "I like tea."), + client().prepareIndex("test").setSource("name", "I like ice cream.") ); refresh(); @@ -804,9 +804,9 @@ public void testDifferentShardSize() throws Exception { ensureGreen(); indexRandom( true, - client().prepareIndex("test", "type1", "1").setSource("field1", "foobar1").setRouting("1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3") + client().prepareIndex("test").setId("1").setSource("field1", "foobar1").setRouting("1"), + client().prepareIndex("test").setId("2").setSource("field1", "foobar2").setRouting("2"), + client().prepareIndex("test").setId("3").setSource("field1", 
"foobar3").setRouting("3") ); Suggest suggest = searchSuggest( @@ -1143,7 +1143,7 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi List builders = new ArrayList<>(); for (String title : titles) { - builders.add(client().prepareIndex("test", "type1").setSource("title", title)); + builders.add(client().prepareIndex("test").setSource("title", title)); } indexRandom(true, builders); @@ -1181,9 +1181,9 @@ public void testSuggestWithFieldAlias() throws Exception { assertAcked(prepareCreate("test").addMapping("type", mapping)); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "mango")); - builders.add(client().prepareIndex("test", "type").setSource("text", "papaya")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "mango")); + builders.add(client().prepareIndex("test").setSource("text", "papaya")); indexRandom(true, false, builders); TermSuggestionBuilder termSuggest = termSuggestion("alias").text("appple"); @@ -1208,10 +1208,10 @@ public void testPhraseSuggestMinDocFreq() throws Exception { ); List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); - builders.add(client().prepareIndex("test", "type").setSource("text", "appfle")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "apple")); + builders.add(client().prepareIndex("test").setSource("text", "appfle")); indexRandom(true, false, builders); PhraseSuggestionBuilder 
phraseSuggest = phraseSuggestion("text").text("appple") @@ -1321,7 +1321,7 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE List builders = new ArrayList<>(); for (String title : titles) { - builders.add(client().prepareIndex("test", "type1").setSource("title", title)); + builders.add(client().prepareIndex("test").setSource("title", title)); } indexRandom(true, builders); diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java index 41d934212193b..57c14876b25ff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java @@ -81,7 +81,8 @@ public void testCustomBM25Similarity() throws Exception { .execute() .actionGet(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "the quick brown fox jumped over the lazy dog", "field2", "the quick brown fox jumped over the lazy dog") .setRefreshPolicy(IMMEDIATE) .execute() diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java index f0673236a8be6..c253f1a4f876e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -545,7 +545,7 @@ public void testSnapshotWithCorruptedShardIndexFile() throws Exception { final IndexRequestBuilder[] documents = new IndexRequestBuilder[nDocs]; for (int j = 0; j < nDocs; j++) { - documents[j] = client.prepareIndex(indexName, "_doc").setSource("foo", "bar"); + documents[j] = client.prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(true, 
documents); flushAndRefresh(); @@ -591,7 +591,7 @@ public void testSnapshotWithCorruptedShardIndexFile() throws Exception { logger.info("--> indexing [{}] more documents into [{}]", nDocs, indexName); for (int j = 0; j < nDocs; j++) { - documents[j] = client.prepareIndex(indexName, "_doc").setSource("foo2", "bar2"); + documents[j] = client.prepareIndex(indexName).setSource("foo2", "bar2"); } indexRandom(true, documents); @@ -618,8 +618,8 @@ public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exceptio logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -671,8 +671,8 @@ public void testDeleteSnapshotWithMissingMetadata() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -718,8 +718,8 @@ public void testDeleteSnapshotWithCorruptedSnapshotFile() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); @@ -775,9 +775,9 @@ public void testDeleteSnapshotWithCorruptedGlobalState() throws Exception { createIndex("test-idx-1", "test-idx-2"); indexRandom( true, - 
client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); flushAndRefresh("test-idx-1", "test-idx-2"); @@ -823,8 +823,8 @@ public void testSnapshotWithMissingShardLevelIndexFile() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); logger.info("--> creating snapshot"); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 93c1f5a9ef398..47d57e1260b5f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -1014,7 +1014,7 @@ public void testSnapshotTotalAndIncrementalSizes() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "type").setSource("test", "init").execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "init").execute().actionGet(); } final Path repoPath = randomRepoPath(); @@ -1047,7 +1047,7 @@ public void testSnapshotTotalAndIncrementalSizes() throws Exception { // add few docs - less than initially docs = between(1, 5); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "type").setSource("test", 
"test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "test" + i).execute().actionGet(); } // create another snapshot @@ -1099,7 +1099,7 @@ public void testDeduplicateIndexMetadata() throws Exception { int docs = between(10, 100); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "_doc").setSource("test", "init").execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "init").execute().actionGet(); } final Path repoPath = randomRepoPath(); @@ -1111,7 +1111,7 @@ public void testDeduplicateIndexMetadata() throws Exception { docs = between(1, 5); for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "_doc").setSource("test", "test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("test", "test" + i).execute().actionGet(); } logger.info("--> restart random data node and add new data node to change index allocation"); @@ -1131,7 +1131,7 @@ public void testDeduplicateIndexMetadata() throws Exception { // index to some other field to trigger a change in index metadata for (int i = 0; i < docs; i++) { - client().prepareIndex(indexName, "_doc").setSource("new_field", "test" + i).execute().actionGet(); + client().prepareIndex(indexName).setSource("new_field", "test" + i).execute().actionGet(); } createFullSnapshot(repositoryName, snapshot2); @@ -1268,7 +1268,7 @@ public void testRetentionLeasesClearedOnRestore() throws Exception { logger.debug("--> indexing {} docs into {}", snapshotDocCount, indexName); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[snapshotDocCount]; for (int i = 0; i < snapshotDocCount; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", "value"); + indexRequestBuilders[i] = client().prepareIndex(indexName).setSource("field", "value"); } indexRandom(true, indexRequestBuilders); assertDocCount(indexName, snapshotDocCount); @@ -1293,7 +1293,7 @@ public void 
testRetentionLeasesClearedOnRestore() throws Exception { logger.debug("--> indexing {} extra docs into {}", extraDocCount, indexName); indexRequestBuilders = new IndexRequestBuilder[extraDocCount]; for (int i = 0; i < extraDocCount; i++) { - indexRequestBuilders[i] = client().prepareIndex(indexName, "_doc").setSource("field", "value"); + indexRequestBuilders[i] = client().prepareIndex(indexName).setSource("field", "value"); } indexRandom(true, indexRequestBuilders); } diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java index aad0f2576d2a3..608a439b40fec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java @@ -81,11 +81,11 @@ public void testWhenMetadataAreLoaded() throws Exception { createIndex("docs"); indexRandom( true, - client().prepareIndex("docs", "doc", "1").setSource("rank", 1), - client().prepareIndex("docs", "doc", "2").setSource("rank", 2), - client().prepareIndex("docs", "doc", "3").setSource("rank", 3), - client().prepareIndex("others", "other").setSource("rank", 4), - client().prepareIndex("others", "other").setSource("rank", 5) + client().prepareIndex("docs").setId("1").setSource("rank", 1), + client().prepareIndex("docs").setId("2").setSource("rank", 2), + client().prepareIndex("docs").setId("3").setSource("rank", 3), + client().prepareIndex("others").setSource("rank", 4), + client().prepareIndex("others").setSource("rank", 5) ); createRepository("repository", CountingMockRepositoryPlugin.TYPE); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java index 
b2e8f2df1e2f3..643a301c025c3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java @@ -147,8 +147,8 @@ public void testParallelRestoreOperations() { assertThat(restoreSnapshotResponse1.status(), equalTo(RestStatus.ACCEPTED)); assertThat(restoreSnapshotResponse2.status(), equalTo(RestStatus.ACCEPTED)); ensureGreen(restoredIndexName1, restoredIndexName2); - assertThat(client.prepareGet(restoredIndexName1, "_doc", docId).get().isExists(), equalTo(true)); - assertThat(client.prepareGet(restoredIndexName2, "_doc", docId2).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName1, docId).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName2, docId2).get().isExists(), equalTo(true)); } public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception { @@ -206,8 +206,8 @@ public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception { assertThat(restoreSnapshotResponse1.get().status(), equalTo(RestStatus.ACCEPTED)); assertThat(restoreSnapshotResponse2.get().status(), equalTo(RestStatus.ACCEPTED)); ensureGreen(restoredIndexName1, restoredIndexName2); - assertThat(client.prepareGet(restoredIndexName1, "_doc", docId).get().isExists(), equalTo(true)); - assertThat(client.prepareGet(restoredIndexName2, "_doc", sameSourceIndex ? docId : docId2).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName1, docId).get().isExists(), equalTo(true)); + assertThat(client.prepareGet(restoredIndexName2, sameSourceIndex ? 
docId : docId2).get().isExists(), equalTo(true)); } public void testRestoreIncreasesPrimaryTerms() { @@ -284,7 +284,7 @@ public void testRestoreWithDifferentMappingsAndSettings() throws Exception { NumShards numShards = getNumShards("test-idx"); - assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("_doc").setSource("baz", "type=text")); + assertAcked(client().admin().indices().preparePutMapping("test-idx").setSource("baz", "type=text")); ensureGreen(); logger.info("--> snapshot it"); @@ -310,7 +310,7 @@ public void testRestoreWithDifferentMappingsAndSettings() throws Exception { .put("refresh_interval", 5, TimeUnit.SECONDS) ) ); - assertAcked(client().admin().indices().preparePutMapping("test-idx").setType("_doc").setSource("foo", "type=text")); + assertAcked(client().admin().indices().preparePutMapping("test-idx").setSource("foo", "type=text")); ensureGreen(); logger.info("--> close index"); @@ -735,13 +735,12 @@ public void testChangeSettingsOnRestore() throws Exception { client().admin() .indices() .preparePutMapping("test-idx") - .setType("_doc") .setSource("field1", "type=text,analyzer=standard,search_analyzer=my_analyzer") ); final int numdocs = randomIntBetween(10, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex("test-idx", "_doc").setId(Integer.toString(i)).setSource("field1", "Foo bar " + i); + builders[i] = client().prepareIndex("test-idx").setId(Integer.toString(i)).setSource("field1", "Foo bar " + i); } indexRandom(true, builders); flushAndRefresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java index b8b2d4c1b665a..88fcd075a563f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -73,6 +73,7 @@ import org.opensearch.index.IndexService; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.EngineTestCase; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.IndicesService; @@ -200,13 +201,13 @@ public void testBasicWorkFlow() throws Exception { logger.info("--> delete some data"); for (int i = 0; i < 50; i++) { - client().prepareDelete("test-idx-1", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-1", Integer.toString(i)).get(); } for (int i = 50; i < 100; i++) { - client().prepareDelete("test-idx-2", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-2", Integer.toString(i)).get(); } for (int i = 0; i < 100; i += 2) { - client().prepareDelete("test-idx-3", "_doc", Integer.toString(i)).get(); + client().prepareDelete("test-idx-3", Integer.toString(i)).get(); } assertAllSuccessful(refresh()); assertDocCount("test-idx-1", 50L); @@ -288,12 +289,11 @@ public void testSingleGetAfterRestore() throws Exception { Path absolutePath = randomRepoPath().toAbsolutePath(); logger.info("Path [{}]", absolutePath); String restoredIndexName = indexName + "-restored"; - String typeName = "actions"; String expectedValue = "expected"; // Write a document String docId = Integer.toString(randomInt()); - index(indexName, typeName, docId, "value", expectedValue); + index(indexName, MapperService.SINGLE_MAPPING_NAME, docId, "value", expectedValue); createRepository(repoName, "fs", absolutePath); createSnapshot(repoName, snapshotName, Collections.singletonList(indexName)); @@ -305,7 +305,7 @@ public void testSingleGetAfterRestore() throws Exception { .get(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); - 
assertThat(client().prepareGet(restoredIndexName, typeName, docId).get().isExists(), equalTo(true)); + assertThat(client().prepareGet(restoredIndexName, docId).get().isExists(), equalTo(true)); } public void testFreshIndexUUID() { @@ -1400,7 +1400,7 @@ public void testSnapshotMoreThanOnce() throws InterruptedException { } } - client().prepareDelete("test", "_doc", "1").get(); + client().prepareDelete("test", "1").get(); createSnapshot("test-repo", "test-2", Collections.singletonList("test")); assertThat(getSnapshot("test-repo", "test-2").state(), equalTo(SnapshotState.SUCCESS)); { @@ -1643,9 +1643,9 @@ public void testListCorruptedSnapshot() throws Exception { logger.info("--> indexing some data"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-3", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-3").setSource("foo", "bar") ); logger.info("--> creating 2 snapshots"); @@ -1708,9 +1708,9 @@ public void testRestoreSnapshotWithCorruptedGlobalState() throws Exception { createIndex("test-idx-1", "test-idx-2"); indexRandom( true, - client().prepareIndex("test-idx-1", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar"), - client().prepareIndex("test-idx-2", "_doc").setSource("foo", "bar") + client().prepareIndex("test-idx-1").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar"), + client().prepareIndex("test-idx-2").setSource("foo", "bar") ); flushAndRefresh("test-idx-1", "test-idx-2"); @@ -1771,7 +1771,7 @@ public void testRestoreSnapshotWithCorruptedIndexMetadata() throws Exception { IndexRequestBuilder[] documents = new IndexRequestBuilder[nbDocs]; for (int j = 0; j < nbDocs; j++) { - documents[j] = 
client.prepareIndex(indexName, "_doc").setSource("foo", "bar"); + documents[j] = client.prepareIndex(indexName).setSource("foo", "bar"); } indexRandom(true, documents); } diff --git a/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java index 390600b2667d3..341725866b545 100644 --- a/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/threadpool/SimpleThreadPoolIT.java @@ -72,7 +72,7 @@ public void testThreadNames() throws Exception { int numDocs = randomIntBetween(2, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setSource( jsonBuilder().startObject() .field("str_value", "s" + i) diff --git a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java index 661c572d4c959..c6ec91a6ab078 100644 --- a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java @@ -167,7 +167,7 @@ public void testUpsert() throws Exception { ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -176,11 +176,11 @@ public void testUpsert() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = 
client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); } - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -189,7 +189,7 @@ public void testUpsert() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("2")); } } @@ -209,7 +209,7 @@ public void testScriptedUpsert() throws Exception { // Pay money from what will be a new account and opening balance comes from upsert doc // provided by client - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) @@ -219,12 +219,12 @@ public void testScriptedUpsert() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("balance").toString(), equalTo("9")); } // Now pay money for an existing account where balance is stored in es - updateResponse = 
client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("balance", openingBalance).endObject()) .setScriptedUpsert(true) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, UPSERT_SCRIPT, params)) @@ -234,7 +234,7 @@ public void testScriptedUpsert() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("balance").toString(), equalTo("7")); } } @@ -243,7 +243,7 @@ public void testUpsertDoc() throws Exception { createTestIndex(); ensureGreen(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(true) .setFetchSource(true) @@ -261,7 +261,7 @@ public void testNotUpsertDoc() throws Exception { ensureGreen(); assertFutureThrows( - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setDocAsUpsert(false) .setFetchSource(true) @@ -274,7 +274,7 @@ public void testUpsertFields() throws Exception { createTestIndex(); ensureGreen(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -287,7 +287,7 @@ public 
void testUpsertFields() throws Exception { assertThat(updateResponse.getGetResult().sourceAsMap().get("bar").toString(), equalTo("baz")); assertThat(updateResponse.getGetResult().sourceAsMap().get("extra"), nullValue()); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -302,7 +302,7 @@ public void testUpsertFields() throws Exception { } public void testIndexAutoCreation() throws Exception { - UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate("test", "1") .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo"))) .setFetchSource(true) @@ -324,29 +324,26 @@ public void testUpdate() throws Exception { Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); DocumentMissingException ex = expectThrows( DocumentMissingException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet() + () -> client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).execute().actionGet() ); - assertEquals("[type1][1]: document missing", ex.getMessage()); + assertEquals("[1]: document missing", ex.getMessage()); - client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); + client().prepareIndex("test").setId("1").setSource("field", 1).execute().actionGet(); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(fieldIncScript) - 
.execute() - .actionGet(); + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(2L)); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("2")); } Map params = new HashMap<>(); params.put("inc", 3); params.put("field", "field"); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, params)) .execute() .actionGet(); @@ -355,12 +352,12 @@ public void testUpdate() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("5")); } // check noop - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript( new Script( ScriptType.INLINE, @@ -376,12 +373,12 @@ public void testUpdate() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("5")); } // check delete - 
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript( new Script( ScriptType.INLINE, @@ -397,13 +394,13 @@ public void testUpdate() throws Exception { assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(false)); } // check _source parameter - client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet(); - updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("field1", 1, "field2", 2).execute().actionGet(); + updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field1"))) .setFetchSource("field1", "field2") .get(); @@ -417,24 +414,24 @@ public void testUpdate() throws Exception { // check updates without script // add new field - client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("field", 1).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()) .execute() .actionGet(); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1")); assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2")); } // change existing field - 
client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()) .execute() .actionGet(); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3")); assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2")); } @@ -449,13 +446,13 @@ public void testUpdate() throws Exception { testMap.put("commonkey", testMap2); testMap.put("map1", 8); - client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet(); - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareIndex("test").setId("1").setSource("map", testMap).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()) .execute() .actionGet(); for (int i = 0; i < 5; i++) { - GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); Map map1 = (Map) getResponse.getSourceAsMap().get("map"); assertThat(map1.size(), equalTo(3)); assertThat(map1.containsKey("map1"), equalTo(true)); @@ -473,10 +470,10 @@ public void testUpdateWithIfSeqNo() throws Exception { createTestIndex(); ensureGreen(); - IndexResponse result = client().prepareIndex("test", "type1", "1").setSource("field", 1).get(); + IndexResponse result = client().prepareIndex("test").setId("1").setSource("field", 1).get(); expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 
2).endObject()) .setIfSeqNo(result.getSeqNo() + 1) .setIfPrimaryTerm(result.getPrimaryTerm()) @@ -485,7 +482,7 @@ public void testUpdateWithIfSeqNo() throws Exception { expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo()) .setIfPrimaryTerm(result.getPrimaryTerm() + 1) @@ -494,14 +491,14 @@ public void testUpdateWithIfSeqNo() throws Exception { expectThrows( VersionConflictEngineException.class, - () -> client().prepareUpdate(indexOrAlias(), "type1", "1") + () -> client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo() + 1) .setIfPrimaryTerm(result.getPrimaryTerm() + 1) .get() ); - UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") + UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 2).endObject()) .setIfSeqNo(result.getSeqNo()) .setIfPrimaryTerm(result.getPrimaryTerm()) @@ -517,7 +514,7 @@ public void testUpdateRequestWithBothScriptAndDoc() throws Exception { Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { - client().prepareUpdate(indexOrAlias(), "type1", "1") + client().prepareUpdate(indexOrAlias(), "1") .setDoc(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(fieldIncScript) .execute() @@ -535,7 +532,7 @@ public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception { ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); try { - client().prepareUpdate(indexOrAlias(), "type1", 
"1").setScript(fieldIncScript).setDocAsUpsert(true).execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "1").setScript(fieldIncScript).setDocAsUpsert(true).execute().actionGet(); fail("Should have thrown ActionRequestValidationException"); } catch (ActionRequestValidationException e) { assertThat(e.validationErrors().size(), equalTo(1)); @@ -551,23 +548,16 @@ public void testContextVariables() throws Exception { // Index some documents client().prepareIndex() .setIndex("test") - .setType("type1") .setId("id1") .setRouting("routing1") .setSource("field1", 1, "content", "foo") .execute() .actionGet(); - client().prepareIndex() - .setIndex("test") - .setType("type1") - .setId("id2") - .setSource("field1", 0, "content", "bar") - .execute() - .actionGet(); + client().prepareIndex().setIndex("test").setId("id2").setSource("field1", 0, "content", "bar").execute().actionGet(); // Update the first object and note context variables values - UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "id1") + UpdateResponse updateResponse = client().prepareUpdate("test", "id1") .setRouting("routing1") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute() @@ -575,26 +565,24 @@ public void testContextVariables() throws Exception { assertEquals(2, updateResponse.getVersion()); - GetResponse getResponse = client().prepareGet("test", "type1", "id1").setRouting("routing1").execute().actionGet(); + GetResponse getResponse = client().prepareGet("test", "id1").setRouting("routing1").execute().actionGet(); Map updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); assertEquals("test", updateContext.get("_index")); - assertEquals("type1", updateContext.get("_type")); assertEquals("id1", updateContext.get("_id")); assertEquals(1, updateContext.get("_version")); assertEquals("routing1", updateContext.get("_routing")); // Idem with the second object - updateResponse = 
client().prepareUpdate("test", "type1", "id2") + updateResponse = client().prepareUpdate("test", "id2") .setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, EXTRACT_CTX_SCRIPT, Collections.emptyMap())) .execute() .actionGet(); assertEquals(2, updateResponse.getVersion()); - getResponse = client().prepareGet("test", "type1", "id2").execute().actionGet(); + getResponse = client().prepareGet("test", "id2").execute().actionGet(); updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); assertEquals("test", updateContext.get("_index")); - assertEquals("type1", updateContext.get("_type")); assertEquals("id2", updateContext.get("_id")); assertEquals(1, updateContext.get("_version")); assertNull(updateContext.get("_routing")); @@ -629,17 +617,13 @@ public void run() { ); } if (useBulkApi) { - UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate( - indexOrAlias(), - "type1", - Integer.toString(i) - ) + UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate(indexOrAlias(), Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()); client().prepareBulk().add(updateRequestBuilder).execute().actionGet(); } else { - client().prepareUpdate(indexOrAlias(), "type1", Integer.toString(i)) + client().prepareUpdate(indexOrAlias(), Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) @@ -675,7 +659,7 @@ public void run() { } assertThat(failures.size(), equalTo(0)); for (int i = 0; i < numberOfUpdatesPerThread; i++) { - GetResponse response = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); + GetResponse response = client().prepareGet("test", Integer.toString(i)).execute().actionGet(); assertThat(response.getId(), equalTo(Integer.toString(i))); assertThat(response.isExists(), equalTo(true)); 
assertThat(response.getVersion(), equalTo((long) numberOfThreads)); @@ -761,7 +745,7 @@ public void run() { for (int k = 0; k < numberOfUpdatesPerId; ++k) { updateRequestsOutstanding.acquire(); try { - UpdateRequest ur = client().prepareUpdate("test", "type1", Integer.toString(j)) + UpdateRequest ur = client().prepareUpdate("test", Integer.toString(j)) .setScript(fieldIncScript) .setRetryOnConflict(retryOnConflict) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) @@ -793,7 +777,7 @@ public void run() { try { deleteRequestsOutstanding.acquire(); - DeleteRequest dr = client().prepareDelete("test", "type1", Integer.toString(j)).request(); + DeleteRequest dr = client().prepareDelete("test", Integer.toString(j)).request(); client().delete(dr, new DeleteListener(j)); } catch (NoNodeAvailableException nne) { deleteRequestsOutstanding.release(); @@ -880,7 +864,7 @@ private void waitForOutstandingRequests(TimeValue timeOut, Semaphore requestsOut // This means that we add 1 to the expected versions and attempts // All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { - client().prepareUpdate("test", "type1", Integer.toString(i)) + client().prepareUpdate("test", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) @@ -892,7 +876,7 @@ private void waitForOutstandingRequests(TimeValue timeOut, Semaphore requestsOut for (int i = 0; i < numberOfIdsPerThread; ++i) { int totalFailures = 0; - GetResponse response = client().prepareGet("test", "type1", Integer.toString(i)).execute().actionGet(); + GetResponse response = client().prepareGet("test", Integer.toString(i)).execute().actionGet(); if (response.isExists()) { assertThat(response.getId(), equalTo(Integer.toString(i))); int expectedVersion = (numberOfThreads * numberOfUpdatesPerId * 2) + 1; diff --git 
a/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java b/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java index c2eb76a0dbe62..606a5fe1b7eca 100644 --- a/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/update/UpdateNoopIT.java @@ -327,7 +327,7 @@ private void updateAndCheckSource(long expectedSeqNo, long expectedVersion, Bool } private UpdateResponse update(Boolean detectNoop, long expectedSeqNo, long expectedVersion, XContentBuilder xContentBuilder) { - UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1") + UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "1") .setDoc(xContentBuilder) .setDocAsUpsert(true) .setFetchSource(true); diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 67c1ff681cb60..30ab282bf3d44 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -39,6 +39,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -61,6 +62,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.queryStringQuery; +import static org.opensearch.index.query.QueryBuilders.rangeQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static 
org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; @@ -77,11 +79,10 @@ public void testSimpleValidateQuery() throws Exception { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("foo") .field("type", "text") @@ -179,11 +180,10 @@ public void testExplainValidateQueryTwoNodes() throws IOException { client().admin() .indices() .preparePutMapping("test") - .setType("type1") .setSource( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("foo") .field("type", "text") @@ -255,7 +255,7 @@ public void testExplainDateRangeInQueryString() { String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS)); String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS)); - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); + client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); @@ -319,13 +319,13 @@ public void testExplainWithRewriteValidateQuery() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type1", "field", "type=text,analyzer=whitespace") + .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) .get(); - client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the 
tree").get(); - client().prepareIndex("test", "type1", "4").setSource("field", "the lazy dog quacks like a duck").get(); + client().prepareIndex("test").setId("1").setSource("field", "quick lazy huge brown pidgin").get(); + client().prepareIndex("test").setId("2").setSource("field", "the quick brown fox").get(); + client().prepareIndex("test").setId("3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test").setId("4").setSource("field", "the lazy dog quacks like a duck").get(); refresh(); // prefix queries @@ -357,7 +357,7 @@ public void testExplainWithRewriteValidateQuery() throws Exception { assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"), containsString("(field:jumps)^0.75"), true); // more like this queries - Item[] items = new Item[] { new Item(null, null, "1") }; + Item[] items = new Item[] { new Item(null, "1") }; assertExplanation( QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, items) .include(true) @@ -381,15 +381,15 @@ public void testExplainWithRewriteValidateQueryAllShards() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type1", "field", "type=text,analyzer=whitespace") + .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)) .get(); // We are relying on specific routing behaviors for the result to be right, so // we cannot randomize the number of shards or change ids here. 
- client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get(); - client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get(); - client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); - client().prepareIndex("test", "type1", "4").setSource("field", "the lazy dog quacks like a duck").get(); + client().prepareIndex("test").setId("1").setSource("field", "quick lazy huge brown pidgin").get(); + client().prepareIndex("test").setId("2").setSource("field", "the quick brown fox").get(); + client().prepareIndex("test").setId("3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get(); + client().prepareIndex("test").setId("4").setSource("field", "the lazy dog quacks like a duck").get(); refresh(); // prefix queries @@ -447,7 +447,6 @@ private static void assertExplanation(QueryBuilder queryBuilder, Matcher ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -468,7 +467,6 @@ private static void assertExplanations( ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -490,18 +488,113 @@ public void testExplainTermsQueryWithLookup() throws Exception { .addMapping("_doc", "user", "type=integer", "followers", "type=integer") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)) .get(); - client().prepareIndex("twitter", "_doc", "1").setSource("followers", new int[] { 1, 2, 3 }).get(); + client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get(); refresh(); - TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", 
"followers")); + TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "1", "followers")); ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("twitter") - .setTypes("_doc") .setQuery(termsLookupQuery) .setExplain(true) .execute() .actionGet(); assertThat(response.isValid(), is(true)); } + + // Issue: https://github.com/opensearch-project/OpenSearch/issues/2036 + public void testValidateDateRangeInQueryString() throws IOException { + assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))); + + assertAcked( + client().admin() + .indices() + .preparePutMapping("test") + .setSource( + XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .startObject("timestamp") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + client().prepareIndex("test").setId("1").setSource("name", "username", "timestamp", 200).get(); + refresh(); + + ValidateQueryResponse response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("username").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use wildcard and date outside the range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range + response = client().admin() + .indices() + 
.prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(1000)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range (allow leading wildcard) + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte(0).lte(1000)).must(queryStringQuery("*erna*"))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use invalid date range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte("aaa").lte(100))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java index 1eb4d088d260d..e433a489ad572 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentDocumentOperationIT.java @@ -55,7 +55,7 @@ public void testConcurrentOperationOnSameDoc() throws Exception { final AtomicReference failure = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(numberOfUpdates); for (int i = 0; i < numberOfUpdates; i++) { - client().prepareIndex("test", "type1", "1").setSource("field1", i).execute(new ActionListener() { + client().prepareIndex("test").setId("1").setSource("field1", i).execute(new ActionListener() { @Override public void onResponse(IndexResponse response) { latch.countDown(); @@ -77,9 +77,9 @@ public void onFailure(Exception e) { 
client().admin().indices().prepareRefresh().execute().actionGet(); logger.info("done indexing, check all have the same field value"); - Map masterSource = client().prepareGet("test", "type1", "1").execute().actionGet().getSourceAsMap(); + Map masterSource = client().prepareGet("test", "1").execute().actionGet().getSourceAsMap(); for (int i = 0; i < (cluster().size() * 5); i++) { - assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().getSourceAsMap(), equalTo(masterSource)); + assertThat(client().prepareGet("test", "1").execute().actionGet().getSourceAsMap(), equalTo(masterSource)); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java index c1dd439cce4aa..2194152284d37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/ConcurrentSeqNoVersioningIT.java @@ -160,7 +160,7 @@ public void testSeqNoCASLinearizability() { logger.info("--> Indexing initial doc for {} keys", numberOfKeys); List partitions = IntStream.range(0, numberOfKeys) - .mapToObj(i -> client().prepareIndex("test", "type", "ID:" + i).setSource("value", -1).get()) + .mapToObj(i -> client().prepareIndex("test").setId("ID:" + i).setSource("value", -1).get()) .map(response -> new Partition(response.getId(), new Version(response.getPrimaryTerm(), response.getSeqNo()))) .collect(Collectors.toList()); @@ -255,7 +255,8 @@ public void run() { version = version.previousTerm(); } - IndexRequest indexRequest = new IndexRequest("test", "type", partition.id).source("value", random.nextInt()) + IndexRequest indexRequest = new IndexRequest("test").id(partition.id) + .source("value", random.nextInt()) .setIfPrimaryTerm(version.primaryTerm) .setIfSeqNo(version.seqNo); Consumer historyResponse = partition.invoke(version); 
diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java index 5edb3cf5f314d..629b20edbb44d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java @@ -72,7 +72,7 @@ public void testExternalVersioningInitialDelete() throws Exception { // Note - external version doesn't throw version conflicts on deletes of non existent records. // This is different from internal versioning - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(17) .setVersionType(VersionType.EXTERNAL) .execute() @@ -81,7 +81,8 @@ public void testExternalVersioningInitialDelete() throws Exception { // this should conflict with the delete command transaction which told us that the object was deleted at version 17. 
assertFutureThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL) @@ -89,7 +90,8 @@ public void testExternalVersioningInitialDelete() throws Exception { VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(18) .setVersionType(VersionType.EXTERNAL) @@ -101,21 +103,24 @@ public void testExternalVersioningInitialDelete() throws Exception { public void testExternalGTE() throws Exception { createIndex("test"); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(12) .setVersionType(VersionType.EXTERNAL_GTE) .get(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_2") .setVersion(12) .setVersionType(VersionType.EXTERNAL_GTE) .get(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_2") .setVersion(14) .setVersionType(VersionType.EXTERNAL_GTE) @@ -123,7 +128,8 @@ public void testExternalGTE() throws Exception { assertThat(indexResponse.getVersion(), equalTo(14L)); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL_GTE), @@ -135,18 +141,18 @@ public void testExternalGTE() throws Exception { refresh(); } for (int i = 0; i < 10; i++) { - 
assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(14L)); + assertThat(client().prepareGet("test", "1").get().getVersion(), equalTo(14L)); } // deleting with a lower version fails. assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE), VersionConflictEngineException.class ); // Delete with a higher or equal version deletes all versions up to the given one. long v = randomIntBetween(14, 17); - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(v) .setVersionType(VersionType.EXTERNAL_GTE) .execute() @@ -156,16 +162,12 @@ public void testExternalGTE() throws Exception { // Deleting with a lower version keeps on failing after a delete. assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL_GTE).execute(), VersionConflictEngineException.class ); // But delete with a higher version is OK. 
- deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(18) - .setVersionType(VersionType.EXTERNAL_GTE) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(18).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet(); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); } @@ -174,7 +176,8 @@ public void testExternalVersioning() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(12) .setVersionType(VersionType.EXTERNAL) @@ -182,7 +185,8 @@ public void testExternalVersioning() throws Exception { .actionGet(); assertThat(indexResponse.getVersion(), equalTo(12L)); - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(14) .setVersionType(VersionType.EXTERNAL) @@ -191,7 +195,8 @@ public void testExternalVersioning() throws Exception { assertThat(indexResponse.getVersion(), equalTo(14L)); assertFutureThrows( - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(13) .setVersionType(VersionType.EXTERNAL) @@ -203,17 +208,17 @@ public void testExternalVersioning() throws Exception { refresh(); } for (int i = 0; i < 10; i++) { - assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(14L)); + assertThat(client().prepareGet("test", "1").execute().actionGet().getVersion(), equalTo(14L)); } // deleting with a lower version fails. 
assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), VersionConflictEngineException.class ); // Delete with a higher version deletes all versions up to the given one. - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1") + DeleteResponse deleteResponse = client().prepareDelete("test", "1") .setVersion(17) .setVersionType(VersionType.EXTERNAL) .execute() @@ -223,22 +228,19 @@ public void testExternalVersioning() throws Exception { // Deleting with a lower version keeps on failing after a delete. assertFutureThrows( - client().prepareDelete("test", "type", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), + client().prepareDelete("test", "1").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(), VersionConflictEngineException.class ); // But delete with a higher version is OK. - deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(18) - .setVersionType(VersionType.EXTERNAL) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet(); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); // TODO: This behavior breaks rest api returning http status 201 // good news is that it this is only the case until deletes GC kicks in. 
- indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(19) .setVersionType(VersionType.EXTERNAL) @@ -246,11 +248,7 @@ public void testExternalVersioning() throws Exception { .actionGet(); assertThat(indexResponse.getVersion(), equalTo(19L)); - deleteResponse = client().prepareDelete("test", "type", "1") - .setVersion(20) - .setVersionType(VersionType.EXTERNAL) - .execute() - .actionGet(); + deleteResponse = client().prepareDelete("test", "1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(20L)); @@ -262,7 +260,8 @@ public void testExternalVersioning() throws Exception { Thread.sleep(300); // gc works based on estimated sampled time. Give it a chance... // And now we have previous version return -1 - indexResponse = client().prepareIndex("test", "type", "1") + indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setVersion(20) .setVersionType(VersionType.EXTERNAL) @@ -295,11 +294,12 @@ public void testCompareAndSetInitialDelete() throws Exception { ensureGreen(); assertFutureThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(17).setIfPrimaryTerm(10).execute(), + client().prepareDelete("test", "1").setIfSeqNo(17).setIfPrimaryTerm(10).execute(), VersionConflictEngineException.class ); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1") + IndexResponse indexResponse = client().prepareIndex("test") + .setId("1") .setSource("field1", "value1_1") .setCreate(true) .execute() @@ -311,45 +311,45 @@ public void testCompareAndSet() { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); + IndexResponse indexResponse = 
client().prepareIndex("test").setId("1").setSource("field1", "value1_1").execute().actionGet(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0L).setIfPrimaryTerm(1).get(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").setIfSeqNo(0L).setIfPrimaryTerm(1).get(); assertThat(indexResponse.getSeqNo(), equalTo(1L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(1).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(1).execute(), VersionConflictEngineException.class ); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(2).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(2).execute(), VersionConflictEngineException.class ); assertFutureThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(1).setIfPrimaryTerm(2).execute(), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(1).setIfPrimaryTerm(2).execute(), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(10).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(10).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(10).setIfPrimaryTerm(2), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", 
"type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class ); client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 10; i++) { - final GetResponse response = client().prepareGet("test", "type", "1").get(); + final GetResponse response = client().prepareGet("test", "1").get(); assertThat(response.getSeqNo(), equalTo(1L)); assertThat(response.getPrimaryTerm(), equalTo(1L)); } @@ -367,27 +367,27 @@ public void testCompareAndSet() { assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND)); } - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1).get(); + DeleteResponse deleteResponse = client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(1).get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getSeqNo(), equalTo(2L)); assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L)); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(3).setIfPrimaryTerm(12), + client().prepareDelete("test", "1").setIfSeqNo(3).setIfPrimaryTerm(12), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), + client().prepareDelete("test", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class ); // the doc is deleted. Even when we hit the deleted seqNo, a conditional delete should fail. 
assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(2).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(2).setIfPrimaryTerm(1), VersionConflictEngineException.class ); } @@ -396,31 +396,31 @@ public void testSimpleVersioningWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").get(); + IndexResponse indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_1").get(); assertThat(indexResponse.getSeqNo(), equalTo(0L)); client().admin().indices().prepareFlush().execute().actionGet(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0).setIfPrimaryTerm(1).get(); + indexResponse = client().prepareIndex("test").setId("1").setSource("field1", "value1_2").setIfSeqNo(0).setIfPrimaryTerm(1).get(); assertThat(indexResponse.getSeqNo(), equalTo(1L)); client().admin().indices().prepareFlush().execute().actionGet(); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1), + client().prepareIndex("test").setId("1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1"), + client().prepareIndex("test").setId("1").setCreate(true).setSource("field1", "value1_1"), VersionConflictEngineException.class ); assertRequestBuilderThrows( - client().prepareDelete("test", "type", "1").setIfSeqNo(0).setIfPrimaryTerm(1), + client().prepareDelete("test", "1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class ); for (int i = 0; i < 10; i++) { - assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L)); + 
assertThat(client().prepareGet("test", "1").execute().actionGet().getVersion(), equalTo(2L)); } client().admin().indices().prepareRefresh().execute().actionGet(); @@ -443,7 +443,7 @@ public void testVersioningWithBulk() { ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")) + .add(client().prepareIndex("test").setId("1").setSource("field1", "value1_1")) .execute() .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); @@ -578,8 +578,6 @@ public String toString() { sb.append(deleteResponse.getIndex()); sb.append(" id="); sb.append(deleteResponse.getId()); - sb.append(" type="); - sb.append(deleteResponse.getType()); sb.append(" version="); sb.append(deleteResponse.getVersion()); sb.append(" found="); @@ -590,8 +588,6 @@ public String toString() { sb.append(indexResponse.getIndex()); sb.append(" id="); sb.append(indexResponse.getId()); - sb.append(" type="); - sb.append(indexResponse.getType()); sb.append(" version="); sb.append(indexResponse.getVersion()); sb.append(" created="); @@ -727,7 +723,7 @@ public void run() { long version = idVersion.version; if (idVersion.delete) { try { - idVersion.response = client().prepareDelete("test", "type", id) + idVersion.response = client().prepareDelete("test", id) .setVersion(version) .setVersionType(VersionType.EXTERNAL) .execute() @@ -739,7 +735,8 @@ public void run() { } } else { try { - idVersion.response = client().prepareIndex("test", "type", id) + idVersion.response = client().prepareIndex("test") + .setId(id) .setSource("foo", "bar") .setVersion(version) .setVersionType(VersionType.EXTERNAL) @@ -787,7 +784,7 @@ public void run() { } else { expected = -1; } - long actualVersion = client().prepareGet("test", "type", id).execute().actionGet().getVersion(); + long actualVersion = client().prepareGet("test", id).execute().actionGet().getVersion(); if (actualVersion != expected) { logger.error("--> FAILED: 
idVersion={} actualVersion= {}", idVersion, actualVersion); failed = true; @@ -822,7 +819,8 @@ public void testDeleteNotLost() throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet(); // Index a doc: - client().prepareIndex("test", "type", "id") + client().prepareIndex("test") + .setId("id") .setSource("foo", "bar") .setOpType(DocWriteRequest.OpType.INDEX) .setVersion(10) @@ -836,28 +834,20 @@ public void testDeleteNotLost() throws Exception { } // Delete it - client().prepareDelete("test", "type", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should reflect delete: - assertThat( - "doc should have been deleted", - client().prepareGet("test", "type", "id").execute().actionGet().getVersion(), - equalTo(-1L) - ); + assertThat("doc should have been deleted", client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); // ThreadPool.relativeTimeInMillis has default granularity of 200 msec, so we must sleep at least that long; sleep much longer in // case system is busy: Thread.sleep(1000); // Delete an unrelated doc (provokes pruning deletes from versionMap) - client().prepareDelete("test", "type", "id2").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id2").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should still reflect delete: - assertThat( - "doc should have been deleted", - client().prepareGet("test", "type", "id").execute().actionGet().getVersion(), - equalTo(-1L) - ); + assertThat("doc should have been deleted", client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); } public void testGCDeletesZero() throws Exception { @@ -870,7 +860,8 @@ public void testGCDeletesZero() 
throws Exception { client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet(); // Index a doc: - client().prepareIndex("test", "type", "id") + client().prepareIndex("test") + .setId("id") .setSource("foo", "bar") .setOpType(DocWriteRequest.OpType.INDEX) .setVersion(10) @@ -884,42 +875,42 @@ public void testGCDeletesZero() throws Exception { } // Delete it - client().prepareDelete("test", "type", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); + client().prepareDelete("test", "id").setVersion(11).setVersionType(VersionType.EXTERNAL).execute().actionGet(); // Real-time get should reflect delete even though index.gc_deletes is 0: - assertThat( - "doc should have been deleted", - client().prepareGet("test", "type", "id").execute().actionGet().getVersion(), - equalTo(-1L) - ); + assertThat("doc should have been deleted", client().prepareGet("test", "id").execute().actionGet().getVersion(), equalTo(-1L)); } public void testSpecialVersioning() { internalCluster().ensureAtLeastNumDataNodes(2); createIndex("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()); - IndexResponse doc1 = client().prepareIndex("test", "type", "1") + IndexResponse doc1 = client().prepareIndex("test") + .setId("1") .setSource("field", "value1") .setVersion(0) .setVersionType(VersionType.EXTERNAL) .execute() .actionGet(); assertThat(doc1.getVersion(), equalTo(0L)); - IndexResponse doc2 = client().prepareIndex("test", "type", "1") + IndexResponse doc2 = client().prepareIndex("test") + .setId("1") .setSource("field", "value2") .setVersion(Versions.MATCH_ANY) .setVersionType(VersionType.INTERNAL) .execute() .actionGet(); assertThat(doc2.getVersion(), equalTo(1L)); - client().prepareDelete("test", "type", "1").get(); // v2 - IndexResponse doc3 = client().prepareIndex("test", "type", "1") + client().prepareDelete("test", "1").get(); // v2 + IndexResponse doc3 = client().prepareIndex("test") + 
.setId("1") .setSource("field", "value3") .setVersion(Versions.MATCH_DELETED) .setVersionType(VersionType.INTERNAL) .execute() .actionGet(); assertThat(doc3.getVersion(), equalTo(3L)); - IndexResponse doc4 = client().prepareIndex("test", "type", "1") + IndexResponse doc4 = client().prepareIndex("test") + .setId("1") .setSource("field", "value4") .setVersion(4L) .setVersionType(VersionType.EXTERNAL_GTE) diff --git a/server/src/main/java/org/opensearch/ExceptionsHelper.java b/server/src/main/java/org/opensearch/ExceptionsHelper.java index 1d3f2c0afce23..418bf9811a7b3 100644 --- a/server/src/main/java/org/opensearch/ExceptionsHelper.java +++ b/server/src/main/java/org/opensearch/ExceptionsHelper.java @@ -297,25 +297,22 @@ public static Optional maybeError(final Throwable cause) { * @param throwable the throwable to possibly throw on another thread */ public static void maybeDieOnAnotherThread(final Throwable throwable) { - ExceptionsHelper.maybeError(throwable) - .ifPresent( - error -> { - /* - * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, sometimes the stack - * contains statements that catch any throwable (e.g., Netty, and the JDK futures framework). This means that a rethrow here - * will not bubble up to where we want it to. So, we fork a thread and throw the exception from there where we are sure the - * stack does not contain statements that catch any throwable. We do not wrap the exception so as to not lose the original cause - * during exit. - */ - try { - // try to log the current stack trace - final String formatted = ExceptionsHelper.formatStackTrace(Thread.currentThread().getStackTrace()); - logger.error("fatal error\n{}", formatted); - } finally { - new Thread(() -> { throw error; }).start(); - } - } - ); + ExceptionsHelper.maybeError(throwable).ifPresent(error -> { + /* + * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. 
Yet, sometimes the stack + * contains statements that catch any throwable (e.g., Netty, and the JDK futures framework). This means that a rethrow here + * will not bubble up to where we want it to. So, we fork a thread and throw the exception from there where we are sure the + * stack does not contain statements that catch any throwable. We do not wrap the exception so as to not lose the original cause + * during exit. + */ + try { + // try to log the current stack trace + final String formatted = ExceptionsHelper.formatStackTrace(Thread.currentThread().getStackTrace()); + logger.error("fatal error\n{}", formatted); + } finally { + new Thread(() -> { throw error; }).start(); + } + }); } /** diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 536e450da4a98..88e04a6c5dd77 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -79,6 +79,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_4 = new Version(1020499, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); + public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version CURRENT = V_2_0_0; diff --git a/server/src/main/java/org/opensearch/action/DocWriteRequest.java b/server/src/main/java/org/opensearch/action/DocWriteRequest.java index 93926923de57e..11d645435c71c 100644 --- a/server/src/main/java/org/opensearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/opensearch/action/DocWriteRequest.java @@ -71,26 +71,6 @@ public interface 
DocWriteRequest extends IndicesRequest, Accountable { */ String index(); - /** - * Set the type for this request - * @return the Request - */ - T type(String type); - - /** - * Get the type that this request operates on - * @return the type - */ - String type(); - - /** - * Set the default type supplied to a bulk - * request if this individual request's type is null - * or empty - * @return the Request - */ - T defaultTypeIfNull(String defaultType); - /** * Get the id of the document for this request * @return the id diff --git a/server/src/main/java/org/opensearch/action/DocWriteResponse.java b/server/src/main/java/org/opensearch/action/DocWriteResponse.java index 9cec09b9d8b10..587f93ed09f52 100644 --- a/server/src/main/java/org/opensearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/opensearch/action/DocWriteResponse.java @@ -31,6 +31,7 @@ package org.opensearch.action; +import org.opensearch.Version; import org.opensearch.action.support.WriteRequest; import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.action.support.WriteResponse; @@ -45,6 +46,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.Index; import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.ShardId; import org.opensearch.rest.RestStatus; @@ -66,7 +68,6 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr private static final String _SHARDS = "_shards"; private static final String _INDEX = "_index"; - private static final String _TYPE = "_type"; private static final String _ID = "_id"; private static final String _VERSION = "_version"; private static final String _SEQ_NO = "_seq_no"; @@ -127,16 +128,14 @@ public void writeTo(StreamOutput out) throws IOException { private final ShardId shardId; private final String id; - private final String type; private final 
long version; private final long seqNo; private final long primaryTerm; private boolean forcedRefresh; protected final Result result; - public DocWriteResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, Result result) { + public DocWriteResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) { this.shardId = Objects.requireNonNull(shardId); - this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); this.seqNo = seqNo; this.primaryTerm = primaryTerm; @@ -148,7 +147,10 @@ public DocWriteResponse(ShardId shardId, String type, String id, long seqNo, lon protected DocWriteResponse(ShardId shardId, StreamInput in) throws IOException { super(in); this.shardId = shardId; - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]"; + } id = in.readString(); version = in.readZLong(); seqNo = in.readZLong(); @@ -164,7 +166,10 @@ protected DocWriteResponse(ShardId shardId, StreamInput in) throws IOException { protected DocWriteResponse(StreamInput in) throws IOException { super(in); shardId = new ShardId(in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]"; + } id = in.readString(); version = in.readZLong(); seqNo = in.readZLong(); @@ -194,16 +199,6 @@ public ShardId getShardId() { return this.shardId; } - /** - * The type of the document changed. - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String getType() { - return this.type; - } - /** * The id of the document changed. 
*/ @@ -270,7 +265,7 @@ public String getLocation(@Nullable String routing) { try { // encode the path components separately otherwise the path separators will be encoded encodedIndex = URLEncoder.encode(getIndex(), "UTF-8"); - encodedType = URLEncoder.encode(getType(), "UTF-8"); + encodedType = URLEncoder.encode(MapperService.SINGLE_MAPPING_NAME, "UTF-8"); encodedId = URLEncoder.encode(getId(), "UTF-8"); encodedRouting = routing == null ? null : URLEncoder.encode(routing, "UTF-8"); } catch (final UnsupportedEncodingException e) { @@ -308,7 +303,9 @@ public void writeTo(StreamOutput out) throws IOException { } private void writeWithoutShardId(StreamOutput out) throws IOException { - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeZLong(version); out.writeZLong(seqNo); @@ -328,7 +325,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { ReplicationResponse.ShardInfo shardInfo = getShardInfo(); builder.field(_INDEX, shardId.getIndexName()); - builder.field(_TYPE, type); builder.field(_ID, id).field(_VERSION, version).field(RESULT, getResult().getLowercase()); if (forcedRefresh) { builder.field(FORCED_REFRESH, true); @@ -359,8 +355,6 @@ protected static void parseInnerToXContent(XContentParser parser, Builder contex if (_INDEX.equals(currentFieldName)) { // index uuid and shard id are unknown and can't be parsed back for now. 
context.setShardId(new ShardId(new Index(parser.text(), IndexMetadata.INDEX_UUID_NA_VALUE), -1)); - } else if (_TYPE.equals(currentFieldName)) { - context.setType(parser.text()); } else if (_ID.equals(currentFieldName)) { context.setId(parser.text()); } else if (_VERSION.equals(currentFieldName)) { @@ -399,7 +393,6 @@ protected static void parseInnerToXContent(XContentParser parser, Builder contex public abstract static class Builder { protected ShardId shardId = null; - protected String type = null; protected String id = null; protected Long version = null; protected Result result = null; @@ -416,14 +409,6 @@ public void setShardId(ShardId shardId) { this.shardId = shardId; } - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - public String getId() { return id; } diff --git a/server/src/main/java/org/opensearch/action/RoutingMissingException.java b/server/src/main/java/org/opensearch/action/RoutingMissingException.java index ac6c4287849de..4f34a7847da4d 100644 --- a/server/src/main/java/org/opensearch/action/RoutingMissingException.java +++ b/server/src/main/java/org/opensearch/action/RoutingMissingException.java @@ -35,6 +35,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.RestStatus; import java.io.IOException; @@ -46,8 +47,12 @@ public class RoutingMissingException extends OpenSearchException { private final String id; + public RoutingMissingException(String index, String id) { + this(index, MapperService.SINGLE_MAPPING_NAME, id); + } + public RoutingMissingException(String index, String type, String id) { - super("routing is required for [" + index + "]/[" + type + "]/[" + id + "]"); + super("routing is required for [" + index + "]/[" + id + "]"); Objects.requireNonNull(index, "index must not be null"); 
Objects.requireNonNull(type, "type must not be null"); Objects.requireNonNull(id, "id must not be null"); diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java index 1da95c680e057..6855803ba6c45 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -364,9 +364,8 @@ static int prepareResponse( } if (request.waitForActiveShards().equals(ActiveShardCount.NONE) == false) { ActiveShardCount waitForActiveShards = request.waitForActiveShards(); - assert waitForActiveShards.equals( - ActiveShardCount.DEFAULT - ) == false : "waitForActiveShards must not be DEFAULT on the request object, instead it should be NONE"; + assert waitForActiveShards.equals(ActiveShardCount.DEFAULT) == false + : "waitForActiveShards must not be DEFAULT on the request object, instead it should be NONE"; if (waitForActiveShards.equals(ActiveShardCount.ALL)) { if (response.getUnassignedShards() == 0 && response.getInitializingShards() == 0) { // if we are waiting for all shards to be active, then the num of unassigned and num of initializing shards must be 0 diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 372bae023eb7d..80049b5e30fdf 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -187,24 +187,17 @@ void waitedForCompletion( TaskInfo snapshotOfRunningTask, ActionListener listener ) { - getFinishedTaskFromIndex( - thisTask, - request, - 
ActionListener.delegateResponse( - listener, - (delegatedListener, e) -> { - /* - * We couldn't load the task from the task index. Instead of 404 we should use the snapshot we took after it finished. If - * the error isn't a 404 then we'll just throw it back to the user. - */ - if (ExceptionsHelper.unwrap(e, ResourceNotFoundException.class) != null) { - delegatedListener.onResponse(new GetTaskResponse(new TaskResult(true, snapshotOfRunningTask))); - } else { - delegatedListener.onFailure(e); - } - } - ) - ); + getFinishedTaskFromIndex(thisTask, request, ActionListener.delegateResponse(listener, (delegatedListener, e) -> { + /* + * We couldn't load the task from the task index. Instead of 404 we should use the snapshot we took after it finished. If + * the error isn't a 404 then we'll just throw it back to the user. + */ + if (ExceptionsHelper.unwrap(e, ResourceNotFoundException.class) != null) { + delegatedListener.onResponse(new GetTaskResponse(new TaskResult(true, snapshotOfRunningTask))); + } else { + delegatedListener.onFailure(e); + } + })); } /** @@ -213,7 +206,7 @@ void waitedForCompletion( * coordinating node if the node is no longer part of the cluster. 
*/ void getFinishedTaskFromIndex(Task thisTask, GetTaskRequest request, ActionListener listener) { - GetRequest get = new GetRequest(TaskResultsService.TASK_INDEX, TaskResultsService.TASK_TYPE, request.getTaskId().toString()); + GetRequest get = new GetRequest(TaskResultsService.TASK_INDEX, request.getTaskId().toString()); get.setParentTask(clusterService.localNode().getId(), thisTask.getId()); client.get(get, ActionListener.wrap(r -> onGetFinishedTaskFromIndex(r, listener), e -> { diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index f5fe738e565b8..33d4ac5d50347 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -363,10 +363,8 @@ private void loadRepositoryData( } final long startTime = snapshotInfo.startTime(); final long endTime = snapshotInfo.endTime(); - assert endTime >= startTime - || (endTime == 0L && snapshotInfo.state().completed() == false) : "Inconsistent timestamps found in SnapshotInfo [" - + snapshotInfo - + "]"; + assert endTime >= startTime || (endTime == 0L && snapshotInfo.state().completed() == false) + : "Inconsistent timestamps found in SnapshotInfo [" + snapshotInfo + "]"; builder.add( new SnapshotStatus( new Snapshot(repositoryName, snapshotId), diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java index 27625028887f9..5ca6fb4226b64 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java @@ -41,9 +41,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.Index; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; /** @@ -61,7 +59,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ private Settings settings = Settings.Builder.EMPTY_SETTINGS; - private final Map mappings = new HashMap<>(); + private String mappings = "{}"; private final Set aliases = new HashSet<>(); @@ -80,8 +78,8 @@ public CreateIndexClusterStateUpdateRequest settings(Settings settings) { return this; } - public CreateIndexClusterStateUpdateRequest mappings(Map mappings) { - this.mappings.putAll(mappings); + public CreateIndexClusterStateUpdateRequest mappings(String mappings) { + this.mappings = mappings; return this; } @@ -122,7 +120,7 @@ public Settings settings() { return settings; } - public Map mappings() { + public String mappings() { return mappings; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index 1d6c093f97b0e..8b38308d39c93 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -35,6 +35,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchGenerationException; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; import org.opensearch.action.admin.indices.alias.Alias; @@ -46,23 +47,21 @@ import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; 
-import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.DeprecationHandler; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; +import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Objects; @@ -82,7 +81,7 @@ * @see org.opensearch.client.Requests#createIndexRequest(String) * @see CreateIndexResponse */ -public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest { public static final ParseField MAPPINGS = new ParseField("mappings"); public static final ParseField SETTINGS = new ParseField("settings"); @@ -94,7 +93,7 @@ public class CreateIndexRequest extends AcknowledgedRequest private Settings settings = EMPTY_SETTINGS; - private final Map mappings = new HashMap<>(); + private String mappings = "{}"; private final Set aliases = new HashSet<>(); @@ -105,11 +104,21 @@ public CreateIndexRequest(StreamInput in) throws IOException { cause = in.readString(); index = in.readString(); settings = readSettingsFromStream(in); - int size = in.readVInt(); - for (int i = 0; i < size; i++) { - final String type = in.readString(); - String source = in.readString(); - mappings.put(type, source); + if 
(in.getVersion().before(Version.V_2_0_0)) { + int size = in.readVInt(); + if (size == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalArgumentException( + "Expected to receive mapping type of [" + MapperService.SINGLE_MAPPING_NAME + "] but got [" + type + "]" + ); + } + mappings = in.readString(); + } else if (size != 0) { + throw new IllegalStateException("Expected to read 0 or 1 mappings, but received " + size); + } + } else { + mappings = in.readString(); } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { @@ -224,14 +233,16 @@ public CreateIndexRequest settings(Map source) { } /** - * Adds mapping that will be added when the index gets created. + * Set the mapping for this index * - * @param type The mapping type - * @param source The mapping source - * @param xContentType The content type of the source + * The mapping should be in the form of a JSON string, with an outer _doc key + *
        +     *     .mapping("{\"_doc\":{\"properties\": ... }}")
        +     * 
        */ - public CreateIndexRequest mapping(String type, String source, XContentType xContentType) { - return mapping(type, new BytesArray(source), xContentType); + public CreateIndexRequest mapping(String mapping) { + this.mappings = mapping; + return this; } /** @@ -240,27 +251,23 @@ public CreateIndexRequest mapping(String type, String source, XContentType xCont * @param type The mapping type * @param source The mapping source * @param xContentType the content type of the mapping source + * @deprecated types are being removed */ + @Deprecated private CreateIndexRequest mapping(String type, BytesReference source, XContentType xContentType) { Objects.requireNonNull(xContentType); Map mappingAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); return mapping(type, mappingAsMap); } - /** - * The cause for this index creation. - */ - public CreateIndexRequest cause(String cause) { - this.cause = cause; - return this; - } - /** * Adds mapping that will be added when the index gets created. 
* * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, XContentBuilder source) { return mapping(type, BytesReference.bytes(source), source.contentType()); } @@ -270,20 +277,21 @@ public CreateIndexRequest mapping(String type, XContentBuilder source) { * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, Map source) { - if (mappings.containsKey(type)) { - throw new IllegalStateException("mappings for type \"" + type + "\" were already defined"); - } // wrap it in a type map if its not if (source.size() != 1 || !source.containsKey(type)) { - source = MapBuilder.newMapBuilder().put(type, source).map(); + source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source); + } else if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + // if it has a different type name, then unwrap and rewrap with _doc + source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source.get(type)); } try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - mappings.put(type, Strings.toString(builder)); - return this; + return mapping(Strings.toString(builder)); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); } @@ -292,9 +300,19 @@ public CreateIndexRequest mapping(String type, Map source) { /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequest mapping(String type, Object... 
source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(type, source)); + mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); + return this; + } + + /** + * The cause for this index creation. + */ + public CreateIndexRequest cause(String cause) { + this.cause = cause; return this; } @@ -413,7 +431,7 @@ public CreateIndexRequest source(Map source, DeprecationHandler depre return this; } - public Map mappings() { + public String mappings() { return this.mappings; } @@ -459,10 +477,16 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(cause); out.writeString(index); writeSettingsToStream(settings, out); - out.writeVInt(mappings.size()); - for (Map.Entry entry : mappings.entrySet()) { - out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + if (out.getVersion().before(Version.V_2_0_0)) { + if ("{}".equals(mappings)) { + out.writeVInt(0); + } else { + out.writeVInt(1); + out.writeString(MapperService.SINGLE_MAPPING_NAME); + out.writeString(mappings); + } + } else { + out.writeString(mappings); } out.writeVInt(aliases.size()); for (Alias alias : aliases) { @@ -473,33 +497,4 @@ public void writeTo(StreamOutput out) throws IOException { } waitForActiveShards.writeTo(out); } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - innerToXContent(builder, params); - builder.endObject(); - return builder; - } - - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SETTINGS.getPreferredName()); - settings.toXContent(builder, params); - builder.endObject(); - - builder.startObject(MAPPINGS.getPreferredName()); - for (Map.Entry entry : mappings.entrySet()) { - try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) { - builder.rawField(entry.getKey(), stream, XContentType.JSON); - } - } - builder.endObject(); - - 
builder.startObject(ALIASES.getPreferredName()); - for (Alias alias : aliases) { - alias.toXContent(builder, params); - } - builder.endObject(); - return builder; - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 383945002c56c..77e48d079cb5c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -111,12 +111,11 @@ public CreateIndexRequestBuilder setSettings(Map source) { /** * Adds mapping that will be added when the index gets created. * - * @param type The mapping type * @param source The mapping source - * @param xContentType The content type of the source */ - public CreateIndexRequestBuilder addMapping(String type, String source, XContentType xContentType) { - request.mapping(type, source, xContentType); + @Deprecated + public CreateIndexRequestBuilder setMapping(String source) { + request.mapping(source); return this; } @@ -133,7 +132,9 @@ public CreateIndexRequestBuilder setCause(String cause) { * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) { request.mapping(type, source); return this; @@ -144,7 +145,9 @@ public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) * * @param type The mapping type * @param source The mapping source + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, Map source) { request.mapping(type, source); return this; @@ -153,7 +156,9 @@ public CreateIndexRequestBuilder addMapping(String type, Map sou /** * A specialized simplified mapping source method, takes the form of simple 
properties definition: * ("field1", "type=string,store=true"). + * @deprecated types are being removed */ + @Deprecated public CreateIndexRequestBuilder addMapping(String type, Object... source) { request.mapping(type, source); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java index aa1d5feb66c9e..4465dc88fe87d 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java @@ -33,8 +33,8 @@ package org.opensearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.lucene.util.CollectionUtil; import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; @@ -45,28 +45,21 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParser.Token; import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Objects; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * A response for a get index action. 
*/ public class GetIndexResponse extends ActionResponse implements ToXContentObject { - private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); + private ImmutableOpenMap mappings = ImmutableOpenMap.of(); private ImmutableOpenMap> aliases = ImmutableOpenMap.of(); private ImmutableOpenMap settings = ImmutableOpenMap.of(); private ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); @@ -75,7 +68,7 @@ public class GetIndexResponse extends ActionResponse implements ToXContentObject public GetIndexResponse( String[] indices, - ImmutableOpenMap> mappings, + ImmutableOpenMap mappings, ImmutableOpenMap> aliases, ImmutableOpenMap settings, ImmutableOpenMap defaultSettings, @@ -106,15 +99,26 @@ public GetIndexResponse( this.indices = in.readStringArray(); int mappingsSize = in.readVInt(); - ImmutableOpenMap.Builder> mappingsMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mappingsMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < mappingsSize; i++) { - String key = in.readString(); - int valueSize = in.readVInt(); - ImmutableOpenMap.Builder mappingEntryBuilder = ImmutableOpenMap.builder(); - for (int j = 0; j < valueSize; j++) { - mappingEntryBuilder.put(in.readString(), new MappingMetadata(in)); + String index = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int numMappings = in.readVInt(); + if (numMappings == 0) { + mappingsMapBuilder.put(index, MappingMetadata.EMPTY_MAPPINGS); + } else if (numMappings == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalStateException("Expected " + MapperService.SINGLE_MAPPING_NAME + " but got [" + type + "]"); + } + mappingsMapBuilder.put(index, new MappingMetadata(in)); + } else { + throw new IllegalStateException("Expected 0 or 1 mappings but got: " + numMappings); + } + } else { + final MappingMetadata metadata = in.readOptionalWriteable(MappingMetadata::new); + mappingsMapBuilder.put(index, metadata != 
null ? metadata : MappingMetadata.EMPTY_MAPPINGS); } - mappingsMapBuilder.put(key, mappingEntryBuilder.build()); } mappings = mappingsMapBuilder.build(); @@ -164,11 +168,11 @@ public String[] getIndices() { return indices(); } - public ImmutableOpenMap> mappings() { + public ImmutableOpenMap mappings() { return mappings; } - public ImmutableOpenMap> getMappings() { + public ImmutableOpenMap getMappings() { return mappings(); } @@ -236,12 +240,16 @@ public String getSetting(String index, String setting) { public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(indices); out.writeVInt(mappings.size()); - for (ObjectObjectCursor> indexEntry : mappings) { + for (ObjectObjectCursor indexEntry : mappings) { out.writeString(indexEntry.key); - out.writeVInt(indexEntry.value.size()); - for (ObjectObjectCursor mappingEntry : indexEntry.value) { - out.writeString(mappingEntry.key); - mappingEntry.value.writeTo(out); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(indexEntry.value == MappingMetadata.EMPTY_MAPPINGS ? 
0 : 1); + if (indexEntry.value != MappingMetadata.EMPTY_MAPPINGS) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + indexEntry.value.writeTo(out); + } + } else { + out.writeOptionalWriteable(indexEntry.value); } } out.writeVInt(aliases.size()); @@ -287,31 +295,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endObject(); - ImmutableOpenMap indexMappings = mappings.get(index); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName) { - builder.startObject("mappings"); - if (indexMappings != null) { - for (final ObjectObjectCursor typeEntry : indexMappings) { - builder.field(typeEntry.key); - builder.map(typeEntry.value.sourceAsMap()); - } - } - builder.endObject(); + MappingMetadata indexMappings = mappings.get(index); + if (indexMappings == null) { + builder.startObject("mappings").endObject(); } else { - MappingMetadata mappings = null; - for (final ObjectObjectCursor typeEntry : indexMappings) { - if (typeEntry.key.equals(MapperService.DEFAULT_MAPPING) == false) { - assert mappings == null; - mappings = typeEntry.value; - } - } - if (mappings == null) { - // no mappings yet - builder.startObject("mappings").endObject(); - } else { - builder.field("mappings", mappings.sourceAsMap()); - } + builder.field("mappings", indexMappings.sourceAsMap()); } builder.startObject("settings"); @@ -340,141 +328,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static List parseAliases(XContentParser parser) throws IOException { - List indexAliases = new ArrayList<>(); - // We start at START_OBJECT since parseIndexEntry ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - indexAliases.add(AliasMetadata.Builder.fromXContent(parser)); - } - return indexAliases; - } - - private static ImmutableOpenMap 
parseMappings(XContentParser parser) throws IOException { - ImmutableOpenMap.Builder indexMappings = ImmutableOpenMap.builder(); - // We start at START_OBJECT since parseIndexEntry ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - parser.nextToken(); - if (parser.currentToken() == Token.START_OBJECT) { - String mappingType = parser.currentName(); - indexMappings.put(mappingType, new MappingMetadata(mappingType, parser.map())); - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } - } - return indexMappings.build(); - } - - private static IndexEntry parseIndexEntry(XContentParser parser) throws IOException { - List indexAliases = null; - ImmutableOpenMap indexMappings = null; - Settings indexSettings = null; - Settings indexDefaultSettings = null; - String dataStream = null; - // We start at START_OBJECT since fromXContent ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser); - parser.nextToken(); - if (parser.currentToken() == Token.START_OBJECT) { - switch (parser.currentName()) { - case "aliases": - indexAliases = parseAliases(parser); - break; - case "mappings": - indexMappings = parseMappings(parser); - break; - case "settings": - indexSettings = Settings.fromXContent(parser); - break; - case "defaults": - indexDefaultSettings = Settings.fromXContent(parser); - break; - default: - parser.skipChildren(); - } - } else if (parser.currentToken() == Token.VALUE_STRING) { - if (parser.currentName().equals("data_stream")) { - dataStream = parser.text(); - } - parser.skipChildren(); - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } - } - return new IndexEntry(indexAliases, indexMappings, indexSettings, indexDefaultSettings, dataStream); - } - - // This is just an internal container to make stuff easier for returning - private 
static class IndexEntry { - List indexAliases = new ArrayList<>(); - ImmutableOpenMap indexMappings = ImmutableOpenMap.of(); - Settings indexSettings = Settings.EMPTY; - Settings indexDefaultSettings = Settings.EMPTY; - String dataStream; - - IndexEntry( - List indexAliases, - ImmutableOpenMap indexMappings, - Settings indexSettings, - Settings indexDefaultSettings, - String dataStream - ) { - if (indexAliases != null) this.indexAliases = indexAliases; - if (indexMappings != null) this.indexMappings = indexMappings; - if (indexSettings != null) this.indexSettings = indexSettings; - if (indexDefaultSettings != null) this.indexDefaultSettings = indexDefaultSettings; - if (dataStream != null) this.dataStream = dataStream; - } - } - - public static GetIndexResponse fromXContent(XContentParser parser) throws IOException { - ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder dataStreams = ImmutableOpenMap.builder(); - List indices = new ArrayList<>(); - - if (parser.currentToken() == null) { - parser.nextToken(); - } - ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser); - parser.nextToken(); - - while (!parser.isClosed()) { - if (parser.currentToken() == Token.START_OBJECT) { - // we assume this is an index entry - String indexName = parser.currentName(); - indices.add(indexName); - IndexEntry indexEntry = parseIndexEntry(parser); - // make the order deterministic - CollectionUtil.timSort(indexEntry.indexAliases, Comparator.comparing(AliasMetadata::alias)); - aliases.put(indexName, Collections.unmodifiableList(indexEntry.indexAliases)); - mappings.put(indexName, indexEntry.indexMappings); - settings.put(indexName, indexEntry.indexSettings); - if (indexEntry.indexDefaultSettings.isEmpty() == false) { - 
defaultSettings.put(indexName, indexEntry.indexDefaultSettings); - } - if (indexEntry.dataStream != null) { - dataStreams.put(indexName, indexEntry.dataStream); - } - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } else { - parser.nextToken(); - } - } - return new GetIndexResponse( - indices.toArray(new String[0]), - mappings.build(), - aliases.build(), - settings.build(), - defaultSettings.build(), - dataStreams.build() - ); - } - @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java b/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java index 872cc66f8c1ba..0cd3214307359 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/get/TransportGetIndexAction.java @@ -102,7 +102,7 @@ protected void doMasterOperation( final ClusterState state, final ActionListener listener ) { - ImmutableOpenMap> mappingsResult = ImmutableOpenMap.of(); + ImmutableOpenMap mappingsResult = ImmutableOpenMap.of(); ImmutableOpenMap> aliasesResult = ImmutableOpenMap.of(); ImmutableOpenMap settings = ImmutableOpenMap.of(); ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); @@ -121,8 +121,7 @@ protected void doMasterOperation( case MAPPINGS: if (!doneMappings) { try { - mappingsResult = state.metadata() - .findMappings(concreteIndices, request.types(), indicesService.getFieldFilter()); + mappingsResult = state.metadata().findMappings(concreteIndices, indicesService.getFieldFilter()); doneMappings = true; } catch (IOException e) { listener.onFailure(e); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java index 
be13313cbe9e7..961662ecdcf7e 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java @@ -32,10 +32,12 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.OriginalIndices; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.single.shard.SingleShardRequest; +import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -43,26 +45,26 @@ public class GetFieldMappingsIndexRequest extends SingleShardRequest { - private final boolean probablySingleFieldRequest; private final boolean includeDefaults; private final String[] fields; - private final String[] types; - private OriginalIndices originalIndices; + private final OriginalIndices originalIndices; GetFieldMappingsIndexRequest(StreamInput in) throws IOException { super(in); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readStringArray(); // removed types array + } fields = in.readStringArray(); includeDefaults = in.readBoolean(); - probablySingleFieldRequest = in.readBoolean(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readBoolean(); // removed probablySingleField boolean + } originalIndices = OriginalIndices.readOriginalIndices(in); } - GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index, boolean probablySingleFieldRequest) { - this.probablySingleFieldRequest = probablySingleFieldRequest; + GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index) { this.includeDefaults = other.includeDefaults(); - this.types = other.types(); this.fields = other.fields(); assert index != null; this.index(index); @@ -74,18 +76,10 @@ 
public ActionRequestValidationException validate() { return null; } - public String[] types() { - return types; - } - public String[] fields() { return fields; } - public boolean probablySingleFieldRequest() { - return probablySingleFieldRequest; - } - public boolean includeDefaults() { return includeDefaults; } @@ -103,10 +97,14 @@ public IndicesOptions indicesOptions() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeStringArray(Strings.EMPTY_ARRAY); + } out.writeStringArray(fields); out.writeBoolean(includeDefaults); - out.writeBoolean(probablySingleFieldRequest); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeBoolean(false); + } OriginalIndices.writeOriginalIndices(originalIndices, out); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java index 8ed9de2427948..e6a2ad3187250 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; @@ -41,6 +42,7 @@ import org.opensearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Arrays; /** * Request the mappings of specific fields @@ -57,7 +59,6 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq private boolean includeDefaults = false; private String[] indices = Strings.EMPTY_ARRAY; - private String[] types = Strings.EMPTY_ARRAY; private 
IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); @@ -66,7 +67,12 @@ public GetFieldMappingsRequest() {} public GetFieldMappingsRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + String[] types = in.readStringArray(); + if (types != Strings.EMPTY_ARRAY) { + throw new IllegalArgumentException("Expected empty type array but received [" + Arrays.toString(types) + "]"); + } + } indicesOptions = IndicesOptions.readIndicesOptions(in); local = in.readBoolean(); fields = in.readStringArray(); @@ -92,11 +98,6 @@ public GetFieldMappingsRequest indices(String... indices) { return this; } - public GetFieldMappingsRequest types(String... types) { - this.types = types; - return this; - } - public GetFieldMappingsRequest indicesOptions(IndicesOptions indicesOptions) { this.indicesOptions = indicesOptions; return this; @@ -107,10 +108,6 @@ public String[] indices() { return indices; } - public String[] types() { - return types; - } - @Override public IndicesOptions indicesOptions() { return indicesOptions; @@ -150,7 +147,9 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArray(indices); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeStringArray(Strings.EMPTY_ARRAY); + } indicesOptions.writeIndicesOptions(out); out.writeBoolean(local); out.writeStringArray(fields); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java index 19eb0afc18262..4a8c624e7e06e 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java @@ -54,16 +54,6 @@ public GetFieldMappingsRequestBuilder addIndices(String... indices) { return this; } - public GetFieldMappingsRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - - public GetFieldMappingsRequestBuilder addTypes(String... types) { - request.types(ArrayUtils.concat(request.types(), types)); - return this; - } - public GetFieldMappingsRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { request.indicesOptions(indicesOptions); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 562c92da8673b..12024ef455a32 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -32,9 +32,9 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.common.ParseField; -import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -48,7 +48,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.Mapper; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; import java.io.IOException; import java.io.InputStream; @@ -59,8 +58,6 @@ import static java.util.Collections.unmodifiableMap; import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static 
org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; /** * Response object for {@link GetFieldMappingsRequest} API @@ -101,37 +98,37 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte }, MAPPINGS, ObjectParser.ValueType.OBJECT); } - private final Map>> mappings; + private final Map> mappings; - GetFieldMappingsResponse(Map>> mappings) { + GetFieldMappingsResponse(Map> mappings) { this.mappings = mappings; } GetFieldMappingsResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); - Map>> indexMapBuilder = new HashMap<>(size); + Map> indexMapBuilder = new HashMap<>(size); for (int i = 0; i < size; i++) { String index = in.readString(); - int typesSize = in.readVInt(); - Map> typeMapBuilder = new HashMap<>(typesSize); - for (int j = 0; j < typesSize; j++) { - String type = in.readString(); - int fieldSize = in.readVInt(); - Map fieldMapBuilder = new HashMap<>(fieldSize); - for (int k = 0; k < fieldSize; k++) { - fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference())); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize != 1) { + throw new IllegalStateException("Expected single type but received [" + typesSize + "]"); } - typeMapBuilder.put(type, unmodifiableMap(fieldMapBuilder)); + in.readString(); // type } - indexMapBuilder.put(index, unmodifiableMap(typeMapBuilder)); + int fieldSize = in.readVInt(); + Map fieldMapBuilder = new HashMap<>(fieldSize); + for (int k = 0; k < fieldSize; k++) { + fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference())); + } + indexMapBuilder.put(index, unmodifiableMap(fieldMapBuilder)); } mappings = unmodifiableMap(indexMapBuilder); - } /** returns the retrieved field mapping. 
The return map keys are index, type, field (as specified in the request). */ - public Map>> mappings() { + public Map> mappings() { return mappings; } @@ -141,44 +138,23 @@ public Map>> mappings() { * @param field field name as specified in the {@link GetFieldMappingsRequest} * @return FieldMappingMetadata for the requested field or null if not found. */ - public FieldMappingMetadata fieldMappings(String index, String type, String field) { - Map> indexMapping = mappings.get(index); + public FieldMappingMetadata fieldMappings(String index, String field) { + Map indexMapping = mappings.get(index); if (indexMapping == null) { return null; } - Map typeMapping = indexMapping.get(type); - if (typeMapping == null) { - return null; - } - return typeMapping.get(field); + return indexMapping.get(field); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); - for (Map.Entry>> indexEntry : mappings.entrySet()) { + for (Map.Entry> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); - if (includeTypeName == false) { - Map mappings = null; - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - if (typeEntry.getKey().equals(MapperService.DEFAULT_MAPPING) == false) { - assert mappings == null; - mappings = typeEntry.getValue(); - } - } - if (mappings != null) { - addFieldMappingsToBuilder(builder, params, mappings); - } - } else { - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - builder.startObject(typeEntry.getKey()); - addFieldMappingsToBuilder(builder, params, typeEntry.getValue()); - builder.endObject(); - } + if (mappings != null) { + addFieldMappingsToBuilder(builder, params, indexEntry.getValue()); } builder.endObject(); @@ -197,26 +173,7 @@ private void 
addFieldMappingsToBuilder(XContentBuilder builder, Params params, M } } - public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - final Map>> mappings = new HashMap<>(); - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - final String index = parser.currentName(); - - final Map> typeMappings = PARSER.parse(parser, index); - mappings.put(index, typeMappings); - - parser.nextToken(); - } - } - - return new GetFieldMappingsResponse(mappings); - } - public static class FieldMappingMetadata implements ToXContentFragment { - public static final FieldMappingMetadata NULL = new FieldMappingMetadata("", BytesArray.EMPTY); private static final ParseField FULL_NAME = new ParseField("full_name"); private static final ParseField MAPPING = new ParseField("mapping"); @@ -253,10 +210,6 @@ public Map sourceAsMap() { return XContentHelper.convertToMap(source, true, XContentType.JSON).v2(); } - public boolean isNull() { - return NULL.fullName().equals(fullName) && NULL.source.length() == source.length(); - } - // pkg-private for testing BytesReference getSource() { return source; @@ -301,18 +254,18 @@ public int hashCode() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(mappings.size()); - for (Map.Entry>> indexEntry : mappings.entrySet()) { + for (Map.Entry> indexEntry : mappings.entrySet()) { out.writeString(indexEntry.getKey()); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(1); + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeVInt(indexEntry.getValue().size()); - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - out.writeString(typeEntry.getKey()); - out.writeVInt(typeEntry.getValue().size()); - for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { - 
out.writeString(fieldEntry.getKey()); - FieldMappingMetadata fieldMapping = fieldEntry.getValue(); - out.writeString(fieldMapping.fullName()); - out.writeBytesReference(fieldMapping.source); - } + for (Map.Entry fieldEntry : indexEntry.getValue().entrySet()) { + out.writeString(fieldEntry.getKey()); + FieldMappingMetadata fieldMapping = fieldEntry.getValue(); + out.writeString(fieldMapping.fullName()); + out.writeBytesReference(fieldMapping.source); } } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java index f0f5265367549..3be8e75be7290 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -33,6 +33,7 @@ package org.opensearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.ParseField; @@ -42,119 +43,80 @@ import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.rest.BaseRestHandler; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; public class GetMappingsResponse extends ActionResponse implements ToXContentFragment { private static final ParseField MAPPINGS = new ParseField("mappings"); - private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); + private final ImmutableOpenMap mappings; - public GetMappingsResponse(ImmutableOpenMap> 
mappings) { + public GetMappingsResponse(ImmutableOpenMap mappings) { this.mappings = mappings; } GetMappingsResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); - ImmutableOpenMap.Builder> indexMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder indexMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < size; i++) { - String key = in.readString(); - int valueSize = in.readVInt(); - ImmutableOpenMap.Builder typeMapBuilder = ImmutableOpenMap.builder(); - for (int j = 0; j < valueSize; j++) { - typeMapBuilder.put(in.readString(), new MappingMetadata(in)); + String index = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int mappingCount = in.readVInt(); + if (mappingCount == 0) { + indexMapBuilder.put(index, MappingMetadata.EMPTY_MAPPINGS); + } else if (mappingCount == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalStateException("Expected " + MapperService.SINGLE_MAPPING_NAME + " but got [" + type + "]"); + } + indexMapBuilder.put(index, new MappingMetadata(in)); + } else { + throw new IllegalStateException("Expected 0 or 1 mappings but got: " + mappingCount); + } + } else { + boolean hasMapping = in.readBoolean(); + indexMapBuilder.put(index, hasMapping ? 
new MappingMetadata(in) : MappingMetadata.EMPTY_MAPPINGS); } - indexMapBuilder.put(key, typeMapBuilder.build()); } mappings = indexMapBuilder.build(); } - public ImmutableOpenMap> mappings() { + public ImmutableOpenMap mappings() { return mappings; } - public ImmutableOpenMap> getMappings() { + public ImmutableOpenMap getMappings() { return mappings(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(mappings.size()); - for (ObjectObjectCursor> indexEntry : mappings) { + for (ObjectObjectCursor indexEntry : mappings) { out.writeString(indexEntry.key); - out.writeVInt(indexEntry.value.size()); - for (ObjectObjectCursor typeEntry : indexEntry.value) { - out.writeString(typeEntry.key); - typeEntry.value.writeTo(out); - } - } - } - - public static GetMappingsResponse fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() == null) { - parser.nextToken(); - } - assert parser.currentToken() == XContentParser.Token.START_OBJECT; - Map parts = parser.map(); - - ImmutableOpenMap.Builder> builder = new ImmutableOpenMap.Builder<>(); - for (Map.Entry entry : parts.entrySet()) { - final String indexName = entry.getKey(); - assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass(); - final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); - - ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); - for (Map.Entry typeEntry : mapping.entrySet()) { - final String typeName = typeEntry.getKey(); - assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " - + typeEntry.getValue().getClass(); - final Map fieldMappings = (Map) typeEntry.getValue(); - MappingMetadata mmd = new MappingMetadata(typeName, fieldMappings); - typeBuilder.put(typeName, mmd); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(indexEntry.value == MappingMetadata.EMPTY_MAPPINGS ? 
0 : 1); + if (indexEntry.value != MappingMetadata.EMPTY_MAPPINGS) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + indexEntry.value.writeTo(out); + } + } else { + out.writeOptionalWriteable(indexEntry.value); } - builder.put(indexName, typeBuilder.build()); } - - return new GetMappingsResponse(builder.build()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - for (final ObjectObjectCursor> indexEntry : getMappings()) { + for (final ObjectObjectCursor indexEntry : getMappings()) { builder.startObject(indexEntry.key); - { - if (includeTypeName == false) { - MappingMetadata mappings = null; - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - if (typeEntry.key.equals("_default_") == false) { - assert mappings == null; - mappings = typeEntry.value; - } - } - if (mappings == null) { - // no mappings yet - builder.startObject(MAPPINGS.getPreferredName()).endObject(); - } else { - builder.field(MAPPINGS.getPreferredName(), mappings.sourceAsMap()); - } - } else { - builder.startObject(MAPPINGS.getPreferredName()); - { - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - builder.field(typeEntry.key, typeEntry.value.sourceAsMap()); - } - } - builder.endObject(); - } + if (indexEntry.value != null) { + builder.field(MAPPINGS.getPreferredName(), indexEntry.value.sourceAsMap()); + } else { + builder.startObject(MAPPINGS.getPreferredName()).endObject(); } builder.endObject(); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index afc905bcac2e4..bdb5222a6dcba 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -81,9 +81,8 @@ protected void doExecute(Task task, GetFieldMappingsRequest request, final Actio if (concreteIndices.length == 0) { listener.onResponse(new GetFieldMappingsResponse(emptyMap())); } else { - boolean probablySingleFieldRequest = concreteIndices.length == 1 && request.types().length == 1 && request.fields().length == 1; for (final String index : concreteIndices) { - GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index, probablySingleFieldRequest); + GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index); shardAction.execute(shardRequest, new ActionListener() { @Override public void onResponse(GetFieldMappingsResponse result) { @@ -107,7 +106,7 @@ public void onFailure(Exception e) { } private GetFieldMappingsResponse merge(AtomicReferenceArray indexResponses) { - Map>> mergedResponses = new HashMap<>(); + Map> mergedResponses = new HashMap<>(); for (int i = 0; i < indexResponses.length(); i++) { Object element = indexResponses.get(i); if (element instanceof GetFieldMappingsResponse) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index c1248b69381d5..ca07475f0deab 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -56,12 +56,10 @@ import org.opensearch.index.mapper.Mapper; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.TypeMissingException; import org.opensearch.threadpool.ThreadPool; import 
org.opensearch.transport.TransportService; import java.io.IOException; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -124,28 +122,9 @@ protected GetFieldMappingsResponse shardOperation(final GetFieldMappingsIndexReq Predicate metadataFieldPredicate = (f) -> indicesService.isMetadataField(indexCreatedVersion, f); Predicate fieldPredicate = metadataFieldPredicate.or(indicesService.getFieldFilter().apply(shardId.getIndexName())); - DocumentMapper mapper = indexService.mapperService().documentMapper(); - Collection typeIntersection; - if (request.types().length == 0) { - typeIntersection = mapper == null ? Collections.emptySet() : Collections.singleton(mapper.type()); - } else { - typeIntersection = mapper != null && Regex.simpleMatch(request.types(), mapper.type()) - ? Collections.singleton(mapper.type()) - : Collections.emptySet(); - if (typeIntersection.isEmpty()) { - throw new TypeMissingException(shardId.getIndex(), request.types()); - } - } - - Map> typeMappings = new HashMap<>(); - for (String type : typeIntersection) { - DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); - Map fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request); - if (!fieldMapping.isEmpty()) { - typeMappings.put(type, fieldMapping); - } - } - return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), Collections.unmodifiableMap(typeMappings))); + DocumentMapper documentMapper = indexService.mapperService().documentMapper(); + Map fieldMapping = findFieldMappings(fieldPredicate, documentMapper, request); + return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), fieldMapping)); } @Override @@ -195,11 +174,14 @@ public Boolean paramAsBoolean(String key, Boolean defaultValue) { } }; - private static Map findFieldMappingsByType( + private static Map findFieldMappings( Predicate fieldPredicate, DocumentMapper documentMapper, 
GetFieldMappingsIndexRequest request ) { + if (documentMapper == null) { + return Collections.emptyMap(); + } Map fieldMappings = new HashMap<>(); final MappingLookup allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { @@ -218,8 +200,6 @@ private static Map findFieldMappingsByType( Mapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); - } else if (request.probablySingleFieldRequest()) { - fieldMappings.put(field, FieldMappingMetadata.NULL); } } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java index 007550f73f205..3f6cb8ed35af9 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java @@ -91,8 +91,8 @@ protected void doMasterOperation( ) { logger.trace("serving getMapping request based on version {}", state.version()); try { - ImmutableOpenMap> result = state.metadata() - .findMappings(concreteIndices, request.types(), indicesService.getFieldFilter()); + ImmutableOpenMap result = state.metadata() + .findMappings(concreteIndices, indicesService.getFieldFilter()); listener.onResponse(new GetMappingsResponse(result)); } catch (IOException e) { listener.onFailure(e); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java index 2237ac573570a..27081048fcdae 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingClusterStateUpdateRequest.java @@ -39,29 +39,13 @@ */ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest { - private String type; - private String source; - public PutMappingClusterStateUpdateRequest() { - - } - - public String type() { - return type; - } - - public PutMappingClusterStateUpdateRequest type(String type) { - this.type = type; - return this; + public PutMappingClusterStateUpdateRequest(String source) { + this.source = source; } public String source() { return source; } - - public PutMappingClusterStateUpdateRequest source(String source) { - this.source = source; - return this; - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java index d8b3b781b6787..52be45054ba55 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -35,6 +35,7 @@ import com.carrotsearch.hppc.ObjectHashSet; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchGenerationException; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; @@ -52,6 +53,7 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.Index; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.io.InputStream; @@ -63,7 +65,7 @@ import static org.opensearch.action.ValidateActions.addValidationError; /** - * Puts mapping definition registered under a specific type into one or more indices. 
Best created with + * Puts mapping definition into one or more indices. Best created with * {@link org.opensearch.client.Requests#putMappingRequest(String...)}. *

        * If the mappings already exists, the new mappings will be merged with the new one. If there are elements @@ -95,8 +97,6 @@ public class PutMappingRequest extends AcknowledgedRequest im private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); - private String type; - private String source; private String origin = ""; @@ -108,7 +108,12 @@ public PutMappingRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readOptionalString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalArgumentException("Expected type [_doc] but received [" + type + "]"); + } + } source = in.readString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); // updateAllTypes @@ -133,11 +138,6 @@ public PutMappingRequest(String... indices) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - if (type == null) { - validationException = addValidationError("mapping type is missing", validationException); - } else if (type.isEmpty()) { - validationException = addValidationError("mapping type is empty", validationException); - } if (source == null) { validationException = addValidationError("mapping source is missing", validationException); } else if (source.isEmpty()) { @@ -203,21 +203,6 @@ public boolean includeDataStreams() { return true; } - /** - * The mapping type. - */ - public String type() { - return type; - } - - /** - * The type of the mappings. - */ - public PutMappingRequest type(String type) { - this.type = type; - return this; - } - /** * The mapping source definition. */ @@ -233,7 +218,7 @@ public String source() { * mapping fields will automatically be put on the top level mapping object. 
*/ public PutMappingRequest source(Object... source) { - return source(buildFromSimplifiedDef(type, source)); + return source(buildFromSimplifiedDef(source)); } public String origin() { @@ -247,8 +232,6 @@ public PutMappingRequest origin(String origin) { } /** - * @param type - * the mapping type * @param source * consisting of field/properties pairs (e.g. "field1", * "type=string,store=true") @@ -256,16 +239,13 @@ public PutMappingRequest origin(String origin) { * if the number of the source arguments is not divisible by two * @return the mappings definition */ - public static XContentBuilder buildFromSimplifiedDef(String type, Object... source) { + public static XContentBuilder buildFromSimplifiedDef(Object... source) { if (source.length % 2 != 0) { throw new IllegalArgumentException("mapping source must be pairs of fieldnames and properties definition."); } try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); - if (type != null) { - builder.startObject(type); - } for (int i = 0; i < source.length; i++) { String fieldName = source[i++].toString(); @@ -302,9 +282,6 @@ public static XContentBuilder buildFromSimplifiedDef(String type, Object... 
sour builder.endObject(); } builder.endObject(); - if (type != null) { - builder.endObject(); - } builder.endObject(); return builder; } catch (Exception e) { @@ -366,7 +343,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArrayNullable(indices); indicesOptions.writeIndicesOptions(out); - out.writeOptionalString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(source); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { out.writeBoolean(true); // updateAllTypes diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java index fcf35891df872..a1b3b40d4e961 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java @@ -74,14 +74,6 @@ public PutMappingRequestBuilder setIndicesOptions(IndicesOptions options) { return this; } - /** - * The type of the mappings. - */ - public PutMappingRequestBuilder setType(String type) { - request.type(type); - return this; - } - /** * The mapping source definition. 
*/ diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 6c580ec8aa22e..f1093a15a3d26 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -132,10 +132,7 @@ protected void masterOperation( } performMappingUpdate(concreteIndices, request, listener, metadataMappingService); } catch (IndexNotFoundException ex) { - logger.debug( - () -> new ParameterizedMessage("failed to put mappings on indices [{}], type [{}]", request.indices(), request.type()), - ex - ); + logger.debug(() -> new ParameterizedMessage("failed to put mappings on indices [{}]", Arrays.asList(request.indices())), ex); throw ex; } } @@ -170,11 +167,9 @@ static void performMappingUpdate( ActionListener listener, MetadataMappingService metadataMappingService ) { - PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest().ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()) - .indices(concreteIndices) - .type(request.type()) - .source(request.source()); + PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest(request.source()).indices( + concreteIndices + ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); metadataMappingService.putMapping(updateRequest, new ActionListener() { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java index 9e1bff73b7038..19a7b8c95199b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -390,7 +390,9 @@ static void validate(Metadata metadata, String rolloverTarget, String newIndexNa if (Strings.isNullOrEmpty(newIndexName) == false) { throw new IllegalArgumentException("new index name may not be specified when rolling over a data stream"); } - if ((request.settings().equals(Settings.EMPTY) == false) || (request.aliases().size() > 0) || (request.mappings().size() > 0)) { + if ((request.settings().equals(Settings.EMPTY) == false) + || (request.aliases().size() > 0) + || (request.mappings().equals("{}") == false)) { throw new IllegalArgumentException( "aliases, mappings, and index settings may not be specified when rolling over a data stream" ); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java index 94028f315a704..f06cb599a60df 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java @@ -43,8 +43,6 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.ObjectParser; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; @@ -60,9 +58,9 @@ * Note: there is a new class with the same name for the Java HLRC that uses a typeless format. * Any changes done to this class should also go to that client class. 
*/ -public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest { - private static final ObjectParser PARSER = new ObjectParser<>("rollover"); + private static final ObjectParser PARSER = new ObjectParser<>("rollover"); private static final ObjectParser>, Void> CONDITION_PARSER = new ObjectParser<>("conditions"); private static final ParseField CONDITIONS = new ParseField("conditions"); @@ -97,24 +95,13 @@ public class RolloverRequest extends AcknowledgedRequest implem CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT ); - PARSER.declareField((parser, request, includeTypeName) -> { - if (includeTypeName) { - for (Map.Entry mappingsEntry : parser.map().entrySet()) { - request.createIndexRequest.mapping(mappingsEntry.getKey(), (Map) mappingsEntry.getValue()); - } - } else { - // a type is not included, add a dummy _doc type - Map mappings = parser.map(); - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); - } - request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); + PARSER.declareField((parser, request, context) -> { + // a type is not included, add a dummy _doc type + Map mappings = parser.map(); + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } + request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField( (parser, request, context) -> request.createIndexRequest.aliases(parser.map()), @@ -273,23 +260,8 @@ public CreateIndexRequest getCreateIndexRequest() { return createIndexRequest; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - createIndexRequest.innerToXContent(builder, params); - - builder.startObject(CONDITIONS.getPreferredName()); - for (Condition condition : conditions.values()) { - condition.toXContent(builder, params); - } - builder.endObject(); - - builder.endObject(); - return builder; - } - // param isTypeIncluded decides how mappings should be parsed from XContent - public void fromXContent(boolean isTypeIncluded, XContentParser parser) throws IOException { - PARSER.parse(parser, this, isTypeIncluded); + public void fromXContent(XContentParser parser) throws IOException { + PARSER.parse(parser, this, null); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java index 6f631e7c086a9..a7af2f963d15b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java @@ -38,7 +38,6 @@ import 
org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentType; public class RolloverRequestBuilder extends MasterNodeOperationRequestBuilder { public RolloverRequestBuilder(OpenSearchClient client, RolloverAction action) { @@ -90,11 +89,6 @@ public RolloverRequestBuilder mapping(String type, Object... source) { return this; } - public RolloverRequestBuilder mapping(String type, String source, XContentType xContentType) { - this.request.getCreateIndexRequest().mapping(type, source, xContentType); - return this; - } - /** * Sets the number of shard copies that should be active for creation of the * new rollover index to return. Defaults to {@link ActiveShardCount#DEFAULT}, which will diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java index 015d52f15f907..2949af00a30d0 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java @@ -190,7 +190,7 @@ public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, C store = indexShard.storeStats(); break; case Indexing: - indexing = indexShard.indexingStats(flags.types()); + indexing = indexShard.indexingStats(); break; case Get: get = indexShard.getStats(); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStatsFlags.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStatsFlags.java index 6c6c94d84127c..e17b497ce312a 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStatsFlags.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStatsFlags.java @@ -34,6 +34,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.Version; 
+import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -48,7 +49,6 @@ public class CommonStatsFlags implements Writeable, Cloneable { public static final CommonStatsFlags NONE = new CommonStatsFlags().clear(); private EnumSet flags = EnumSet.allOf(Flag.class); - private String[] types = null; private String[] groups = null; private String[] fieldDataFields = null; private String[] completionDataFields = null; @@ -75,7 +75,9 @@ public CommonStatsFlags(StreamInput in) throws IOException { flags.add(flag); } } - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readStringArray(); + } groups = in.readStringArray(); fieldDataFields = in.readStringArray(); completionDataFields = in.readStringArray(); @@ -97,7 +99,9 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeLong(longFlags); - out.writeStringArrayNullable(types); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeStringArrayNullable(Strings.EMPTY_ARRAY); + } out.writeStringArrayNullable(groups); out.writeStringArrayNullable(fieldDataFields); out.writeStringArrayNullable(completionDataFields); @@ -116,7 +120,6 @@ public void writeTo(StreamOutput out) throws IOException { */ public CommonStatsFlags all() { flags = EnumSet.allOf(Flag.class); - types = null; groups = null; fieldDataFields = null; completionDataFields = null; @@ -132,7 +135,6 @@ public CommonStatsFlags all() { */ public CommonStatsFlags clear() { flags = EnumSet.noneOf(Flag.class); - types = null; groups = null; fieldDataFields = null; completionDataFields = null; @@ -151,23 +153,6 @@ public Flag[] getFlags() { return flags.toArray(new Flag[flags.size()]); } - /** - * Document types to return stats for. Mainly affects {@link Flag#Indexing} when - * enabled, returning specific indexing stats for those types. 
- */ - public CommonStatsFlags types(String... types) { - this.types = types; - return this; - } - - /** - * Document types to return stats for. Mainly affects {@link Flag#Indexing} when - * enabled, returning specific indexing stats for those types. - */ - public String[] types() { - return this.types; - } - /** * Sets specific search group stats to retrieve the stats for. Mainly affects search * when enabled. diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequest.java index c5e99119d3cb7..bbe69b700b876 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequest.java @@ -90,23 +90,6 @@ public IndicesStatsRequest flags(CommonStatsFlags flags) { return this; } - /** - * Document types to return stats for. Mainly affects {@link #indexing(boolean)} when - * enabled, returning specific indexing stats for those types. - */ - public IndicesStatsRequest types(String... types) { - flags.types(types); - return this; - } - - /** - * Document types to return stats for. Mainly affects {@link #indexing(boolean)} when - * enabled, returning specific indexing stats for those types. - */ - public String[] types() { - return this.flags.types(); - } - /** * Sets specific search group stats to retrieve the stats for. Mainly affects search * when enabled. 
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java index afb0790367c7f..23c33401966b4 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java @@ -78,15 +78,6 @@ public final IndicesStatsRequestBuilder setTimeout(TimeValue timeout) { return this; } - /** - * Document types to return stats for. Mainly affects {@link #setIndexing(boolean)} when - * enabled, returning specific indexing stats for those types. - */ - public IndicesStatsRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - public IndicesStatsRequestBuilder setGroups(String... groups) { request.groups(groups); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java index 5a596b090133f..e6d487e0a40b3 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java @@ -38,15 +38,13 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import static java.util.Collections.singletonMap; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; public class GetIndexTemplatesResponse 
extends ActionResponse implements ToXContentObject { @@ -57,7 +55,7 @@ public GetIndexTemplatesResponse(StreamInput in) throws IOException { int size = in.readVInt(); indexTemplates = new ArrayList<>(); for (int i = 0; i < size; i++) { - indexTemplates.add(0, IndexTemplateMetadata.readFrom(in)); + indexTemplates.add(IndexTemplateMetadata.readFrom(in)); } } @@ -77,32 +75,28 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIndexTemplatesResponse that = (GetIndexTemplatesResponse) o; + return Objects.equals(indexTemplates, that.indexTemplates); + } + + @Override + public int hashCode() { + return Objects.hash(indexTemplates); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { params = new ToXContent.DelegatingMapParams(singletonMap("reduce_mappings", "true"), params); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); for (IndexTemplateMetadata indexTemplateMetadata : getIndexTemplates()) { - if (includeTypeName) { - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); - } else { - IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); - } + IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); } builder.endObject(); return builder; } - - public static GetIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException { - final List templates = new ArrayList<>(); - for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { - if (token == XContentParser.Token.FIELD_NAME) { - final IndexTemplateMetadata templateMetadata = IndexTemplateMetadata.Builder.fromXContent(parser, parser.currentName()); - 
templates.add(templateMetadata); - } - } - return new GetIndexTemplatesResponse(templates); - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 5ca0c6e36e1ae..598b5bdbf6d3b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -252,8 +252,8 @@ public static Template resolveTemplate( Map aliasesByName = aliases.stream().collect(Collectors.toMap(AliasMetadata::getAlias, Function.identity())); // empty request mapping as the user can't specify any explicit mappings via the simulate api - List>> mappings = MetadataCreateIndexService.collectV2Mappings( - Collections.emptyMap(), + List> mappings = MetadataCreateIndexService.collectV2Mappings( + "{}", simulatedState, matchingTemplate, xContentRegistry, @@ -264,11 +264,9 @@ public static Template resolveTemplate( indexMetadata, tempIndexService -> { MapperService mapperService = tempIndexService.mapperService(); - for (Map> mapping : mappings) { - if (!mapping.isEmpty()) { - assert mapping.size() == 1 : mapping; - Map.Entry> entry = mapping.entrySet().iterator().next(); - mapperService.merge(entry.getKey(), entry.getValue(), MapperService.MergeReason.INDEX_TEMPLATE); + for (Map mapping : mappings) { + if (mapping.isEmpty() == false) { + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index d331a1f9a559e..2ea2e492ffe4d 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -304,7 +304,7 @@ public PutIndexTemplateRequest mapping(String type, Map source) * ("field1", "type=string,store=true"). */ public PutIndexTemplateRequest mapping(String type, Object... source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(type, source)); + mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 2b3a55d7d5988..4d6525d002381 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -32,8 +32,8 @@ package org.opensearch.action.admin.indices.validate.query; +import org.opensearch.Version; import org.opensearch.action.support.broadcast.BroadcastShardRequest; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.index.query.QueryBuilder; @@ -49,7 +49,6 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { private QueryBuilder query; - private String[] types = Strings.EMPTY_ARRAY; private boolean explain; private boolean rewrite; private long nowInMillis; @@ -58,12 +57,12 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { public ShardValidateQueryRequest(StreamInput in) throws IOException { super(in); query = in.readNamedWriteable(QueryBuilder.class); - - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < 
typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } filteringAliases = new AliasFilter(in); @@ -75,7 +74,6 @@ public ShardValidateQueryRequest(StreamInput in) throws IOException { public ShardValidateQueryRequest(ShardId shardId, AliasFilter filteringAliases, ValidateQueryRequest request) { super(shardId, request); this.query = request.query(); - this.types = request.types(); this.explain = request.explain(); this.rewrite = request.rewrite(); this.filteringAliases = Objects.requireNonNull(filteringAliases, "filteringAliases must not be null"); @@ -86,10 +84,6 @@ public QueryBuilder query() { return query; } - public String[] types() { - return this.types; - } - public boolean explain() { return this.explain; } @@ -110,9 +104,8 @@ public long nowInMillis() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(0); // no types to filter } filteringAliases.writeTo(out); out.writeBoolean(explain); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index b3c8acd2de3f5..1849b41ce707f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -131,7 +131,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener if (request.query() == null) { rewriteListener.onResponse(request.query()); } else { - 
Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener); + Rewriteable.rewriteAndFetch(request.query(), searchService.getValidationRewriteContext(timeProvider), rewriteListener); } } @@ -222,11 +222,10 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re String error = null; ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest( request.shardId(), - request.types(), request.nowInMillis(), request.filteringAliases() ); - SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); + SearchContext searchContext = searchService.createValidationContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); try { ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query()); searchContext.parsedQuery(parsedQuery); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 81c42ded2ce39..1bb85c4e84483 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.admin.indices.validate.query; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.ValidateActions; import org.opensearch.action.support.IndicesOptions; @@ -60,8 +61,6 @@ public class ValidateQueryRequest extends BroadcastRequest private boolean rewrite; private boolean allShards; - private String[] types = Strings.EMPTY_ARRAY; - long nowInMillis; public ValidateQueryRequest() { @@ -71,11 +70,12 @@ public ValidateQueryRequest() { public ValidateQueryRequest(StreamInput in) throws IOException { super(in); query = 
in.readNamedWriteable(QueryBuilder.class); - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } explain = in.readBoolean(); @@ -113,29 +113,6 @@ public ValidateQueryRequest query(QueryBuilder query) { return this; } - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return this.types; - } - - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public ValidateQueryRequest types(String... 
types) { - this.types = types; - return this; - } - /** * Indicate if detailed information about query is requested */ @@ -182,9 +159,8 @@ public boolean allShards() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(0); // no types to filter } out.writeBoolean(explain); out.writeBoolean(rewrite); @@ -196,8 +172,7 @@ public String toString() { return "[" + Arrays.toString(indices) + "]" - + Arrays.toString(types) - + ", query[" + + " query[" + query + "], explain:" + explain diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java index de4f619804b20..88261e6536240 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java @@ -45,14 +45,6 @@ public ValidateQueryRequestBuilder(OpenSearchClient client, ValidateQueryAction super(client, action, new ValidateQueryRequest()); } - /** - * The types of documents the query will run against. Defaults to all types. - */ - public ValidateQueryRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - /** * The query to validate. 
* diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java index a45b269f8c818..2002d5864e966 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java @@ -105,13 +105,7 @@ void setPrimaryResponse(BulkItemResponse primaryResponse) { */ public void abort(String index, Exception cause) { if (primaryResponse == null) { - final BulkItemResponse.Failure failure = new BulkItemResponse.Failure( - index, - request.type(), - request.id(), - Objects.requireNonNull(cause), - true - ); + final BulkItemResponse.Failure failure = new BulkItemResponse.Failure(index, request.id(), Objects.requireNonNull(cause), true); setPrimaryResponse(new BulkItemResponse(id, request.opType(), failure)); } else { assert primaryResponse.isFailed() && primaryResponse.getFailure().isAborted() : "response [" diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java index af34789401a6b..fdb27a00bac2d 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java @@ -35,6 +35,7 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; +import org.opensearch.Version; import org.opensearch.action.DocWriteRequest.OpType; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.delete.DeleteResponse; @@ -51,6 +52,7 @@ import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.ShardId; import 
org.opensearch.rest.RestStatus; @@ -69,7 +71,6 @@ public class BulkItemResponse implements Writeable, StatusToXContentObject { private static final String _INDEX = "_index"; - private static final String _TYPE = "_type"; private static final String _ID = "_id"; private static final String STATUS = "status"; private static final String ERROR = "error"; @@ -88,7 +89,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(STATUS, response.status().getStatus()); } else { builder.field(_INDEX, failure.getIndex()); - builder.field(_TYPE, failure.getType()); builder.field(_ID, failure.getId()); builder.field(STATUS, failure.getStatus().getStatus()); builder.startObject(ERROR); @@ -166,7 +166,7 @@ public static BulkItemResponse fromXContent(XContentParser parser, int id) throw BulkItemResponse bulkItemResponse; if (exception != null) { - Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getType(), builder.getId(), exception, status); + Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status); bulkItemResponse = new BulkItemResponse(id, opType, failure); } else { bulkItemResponse = new BulkItemResponse(id, opType, builder.build()); @@ -179,13 +179,11 @@ public static BulkItemResponse fromXContent(XContentParser parser, int id) throw */ public static class Failure implements Writeable, ToXContentFragment { public static final String INDEX_FIELD = "index"; - public static final String TYPE_FIELD = "type"; public static final String ID_FIELD = "id"; public static final String CAUSE_FIELD = "cause"; public static final String STATUS_FIELD = "status"; private final String index; - private final String type; private final String id; private final Exception cause; private final RestStatus status; @@ -196,11 +194,10 @@ public static class Failure implements Writeable, ToXContentFragment { public static final ConstructingObjectParser PARSER = new 
ConstructingObjectParser<>( "bulk_failures", true, - a -> new Failure((String) a[0], (String) a[1], (String) a[2], (Exception) a[3], RestStatus.fromCode((int) a[4])) + a -> new Failure((String) a[0], (String) a[1], (Exception) a[2], RestStatus.fromCode((int) a[3])) ); static { PARSER.declareString(constructorArg(), new ParseField(INDEX_FIELD)); - PARSER.declareString(constructorArg(), new ParseField(TYPE_FIELD)); PARSER.declareString(optionalConstructorArg(), new ParseField(ID_FIELD)); PARSER.declareObject(constructorArg(), (p, c) -> OpenSearchException.fromXContent(p), new ParseField(CAUSE_FIELD)); PARSER.declareInt(constructorArg(), new ParseField(STATUS_FIELD)); @@ -209,13 +206,12 @@ public static class Failure implements Writeable, ToXContentFragment { /** * For write failures before operation was assigned a sequence number. * - * use @{link {@link #Failure(String, String, String, Exception, long, long)}} + * use @{link {@link #Failure(String, String, Exception, long, long)}} * to record operation sequence no with failure */ - public Failure(String index, String type, String id, Exception cause) { + public Failure(String index, String id, Exception cause) { this( index, - type, id, cause, ExceptionsHelper.status(cause), @@ -225,10 +221,9 @@ public Failure(String index, String type, String id, Exception cause) { ); } - public Failure(String index, String type, String id, Exception cause, boolean aborted) { + public Failure(String index, String id, Exception cause, boolean aborted) { this( index, - type, id, cause, ExceptionsHelper.status(cause), @@ -238,18 +233,17 @@ public Failure(String index, String type, String id, Exception cause, boolean ab ); } - public Failure(String index, String type, String id, Exception cause, RestStatus status) { - this(index, type, id, cause, status, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, false); + public Failure(String index, String id, Exception cause, RestStatus status) { + this(index, id, 
cause, status, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, false); } /** For write failures after operation was assigned a sequence number. */ - public Failure(String index, String type, String id, Exception cause, long seqNo, long term) { - this(index, type, id, cause, ExceptionsHelper.status(cause), seqNo, term, false); + public Failure(String index, String id, Exception cause, long seqNo, long term) { + this(index, id, cause, ExceptionsHelper.status(cause), seqNo, term, false); } - private Failure(String index, String type, String id, Exception cause, RestStatus status, long seqNo, long term, boolean aborted) { + private Failure(String index, String id, Exception cause, RestStatus status, long seqNo, long term, boolean aborted) { this.index = index; - this.type = type; this.id = id; this.cause = cause; this.status = status; @@ -263,7 +257,11 @@ private Failure(String index, String type, String id, Exception cause, RestStatu */ public Failure(StreamInput in) throws IOException { index = in.readString(); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readString(); + // can't make an assertion about type names here because too many tests still set their own + // types bypassing various checks + } id = in.readOptionalString(); cause = in.readException(); status = ExceptionsHelper.status(cause); @@ -279,7 +277,9 @@ public Failure(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeOptionalString(id); out.writeException(cause); out.writeZLong(seqNo); @@ -296,13 +296,6 @@ public String getIndex() { return this.index; } - /** - * The type of the action. - */ - public String getType() { - return type; - } - /** * The id of the action. 
*/ @@ -361,7 +354,6 @@ public boolean isAborted() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(INDEX_FIELD, index); - builder.field(TYPE_FIELD, type); if (id != null) { builder.field(ID_FIELD, id); } @@ -468,16 +460,6 @@ public String getIndex() { return response.getIndex(); } - /** - * The type of the action. - */ - public String getType() { - if (failure != null) { - return failure.getType(); - } - return response.getType(); - } - /** * The id of the action. */ diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkPrimaryExecutionContext.java b/server/src/main/java/org/opensearch/action/bulk/BulkPrimaryExecutionContext.java index 1ccf5f1924f8f..da8833fe49a29 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkPrimaryExecutionContext.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkPrimaryExecutionContext.java @@ -109,8 +109,8 @@ private static boolean isAborted(BulkItemResponse response) { /** move to the next item to execute */ private void advance() { - assert currentItemState == ItemProcessingState.COMPLETED - || currentIndex == -1 : "moving to next but current item wasn't completed (state: " + currentItemState + ")"; + assert currentItemState == ItemProcessingState.COMPLETED || currentIndex == -1 + : "moving to next but current item wasn't completed (state: " + currentItemState + ")"; currentItemState = ItemProcessingState.INITIAL; currentIndex = findNextNonAborted(currentIndex + 1); retryCounter = 0; @@ -251,7 +251,7 @@ public void failOnMappingUpdate(Exception cause) { docWriteRequest.opType(), // Make sure to use getCurrentItem().index() here, if you use docWriteRequest.index() it will use the // concrete index instead of an alias if used! 
- new BulkItemResponse.Failure(getCurrentItem().index(), docWriteRequest.type(), docWriteRequest.id(), cause) + new BulkItemResponse.Failure(getCurrentItem().index(), docWriteRequest.id(), cause) ); markAsCompleted(executionResult); } @@ -268,7 +268,6 @@ public void markOperationAsExecuted(Engine.Result result) { Engine.IndexResult indexResult = (Engine.IndexResult) result; response = new IndexResponse( primary.shardId(), - requestToExecute.type(), requestToExecute.id(), result.getSeqNo(), result.getTerm(), @@ -279,7 +278,6 @@ public void markOperationAsExecuted(Engine.Result result) { Engine.DeleteResult deleteResult = (Engine.DeleteResult) result; response = new DeleteResponse( primary.shardId(), - requestToExecute.type(), requestToExecute.id(), deleteResult.getSeqNo(), result.getTerm(), @@ -304,7 +302,6 @@ public void markOperationAsExecuted(Engine.Result result) { // concrete index instead of an alias if used! new BulkItemResponse.Failure( request.index(), - docWriteRequest.type(), docWriteRequest.id(), result.getFailure(), result.getSeqNo(), diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java index 936604d84a15d..90a177119cfd8 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java @@ -106,7 +106,6 @@ public static class Builder { private TimeValue flushInterval = null; private BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff(); private String globalIndex; - private String globalType; private String globalRouting; private String globalPipeline; @@ -168,11 +167,6 @@ public Builder setGlobalIndex(String globalIndex) { return this; } - public Builder setGlobalType(String globalType) { - this.globalType = globalType; - return this; - } - public Builder setGlobalRouting(String globalRouting) { this.globalRouting = globalRouting; return this; @@ -219,7 +213,7 @@ 
public BulkProcessor build() { } private Supplier createBulkRequestWithGlobalDefaults() { - return () -> new BulkRequest(globalIndex, globalType).pipeline(globalPipeline).routing(globalRouting); + return () -> new BulkRequest(globalIndex).pipeline(globalPipeline).routing(globalRouting); } } @@ -452,9 +446,8 @@ private void internalAdd(DocWriteRequest request) { /** * Adds the data from the bytes to be processed by the bulk processor */ - public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) - throws Exception { - return add(data, defaultIndex, defaultType, null, xContentType); + public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, XContentType xContentType) throws Exception { + return add(data, defaultIndex, null, xContentType); } /** @@ -463,7 +456,6 @@ public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nu public BulkProcessor add( BytesReference data, @Nullable String defaultIndex, - @Nullable String defaultType, @Nullable String defaultPipeline, XContentType xContentType ) throws Exception { @@ -471,7 +463,7 @@ public BulkProcessor add( lock.lock(); try { ensureOpen(); - bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, null, true, xContentType); + bulkRequest.add(data, defaultIndex, null, null, defaultPipeline, null, true, xContentType); bulkRequestToExecute = newBulkRequestIfNeeded(); } finally { lock.unlock(); diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkRequest.java b/server/src/main/java/org/opensearch/action/bulk/BulkRequest.java index a05f5dac2eb1b..e3bf5bced5072 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkRequest.java @@ -52,7 +52,6 @@ import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; 
-import org.opensearch.index.mapper.MapperService; import org.opensearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -92,7 +91,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques private String globalPipeline; private String globalRouting; private String globalIndex; - private String globalType; private Boolean globalRequireAlias; private long sizeInBytes = 0; @@ -111,15 +109,6 @@ public BulkRequest(@Nullable String globalIndex) { this.globalIndex = globalIndex; } - /** - * @deprecated Types are in the process of being removed. Use {@link #BulkRequest(String)} instead - */ - @Deprecated - public BulkRequest(@Nullable String globalIndex, @Nullable String globalType) { - this.globalIndex = globalIndex; - this.globalType = globalType; - } - /** * Adds a list of requests to be executed. Either index or delete requests. */ @@ -246,62 +235,21 @@ public long estimatedSizeInBytes() { * Adds a framed data in binary format */ public BulkRequest add(byte[] data, int from, int length, XContentType xContentType) throws IOException { - return add(data, from, length, null, null, xContentType); - } - - /** - * Adds a framed data in binary format - * @deprecated use {@link #add(byte[], int, int, String, XContentType)} instead - */ - @Deprecated - public BulkRequest add( - byte[] data, - int from, - int length, - @Nullable String defaultIndex, - @Nullable String defaultType, - XContentType xContentType - ) throws IOException { - return add(new BytesArray(data, from, length), defaultIndex, defaultType, xContentType); + return add(data, from, length, null, xContentType); } /** * Adds a framed data in binary format */ public BulkRequest add(byte[] data, int from, int length, @Nullable String defaultIndex, XContentType xContentType) throws IOException { - return add(new BytesArray(data, from, length), defaultIndex, MapperService.SINGLE_MAPPING_NAME, xContentType); - } - - /** - * Adds a framed data in binary format - * 
@deprecated use {@link #add(BytesReference, String, XContentType)} instead - */ - @Deprecated - public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) - throws IOException { - return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType); + return add(new BytesArray(data, from, length), defaultIndex, xContentType); } /** * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, XContentType xContentType) throws IOException { - return add(data, defaultIndex, MapperService.SINGLE_MAPPING_NAME, null, null, null, null, true, xContentType); - } - - /** - * Adds a framed data in binary format - * @deprecated use {@link #add(BytesReference, String, boolean, XContentType)} instead - */ - @Deprecated - public BulkRequest add( - BytesReference data, - @Nullable String defaultIndex, - @Nullable String defaultType, - boolean allowExplicitIndex, - XContentType xContentType - ) throws IOException { - return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex, xContentType); + return add(data, defaultIndex, null, null, null, null, true, xContentType); } /** @@ -309,7 +257,7 @@ public BulkRequest add( */ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, boolean allowExplicitIndex, XContentType xContentType) throws IOException { - return add(data, defaultIndex, MapperService.SINGLE_MAPPING_NAME, null, null, null, null, allowExplicitIndex, xContentType); + return add(data, defaultIndex, null, null, null, null, allowExplicitIndex, xContentType); } public BulkRequest add( @@ -321,27 +269,12 @@ public BulkRequest add( boolean allowExplicitIndex, XContentType xContentType ) throws IOException { - return add( - data, - defaultIndex, - MapperService.SINGLE_MAPPING_NAME, - defaultRouting, - defaultFetchSourceContext, - defaultPipeline, - null, - allowExplicitIndex, - xContentType 
- ); + return add(data, defaultIndex, defaultRouting, defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, xContentType); } - /** - * @deprecated use {@link #add(BytesReference, String, String, FetchSourceContext, String, boolean, XContentType)} instead - */ - @Deprecated public BulkRequest add( BytesReference data, @Nullable String defaultIndex, - @Nullable String defaultType, @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @@ -355,14 +288,13 @@ public BulkRequest add( new BulkRequestParser(true).parse( data, defaultIndex, - defaultType, routing, defaultFetchSourceContext, pipeline, requireAlias, allowExplicitIndex, xContentType, - this::internalAdd, + (indexRequest, type) -> internalAdd(indexRequest), this::internalAdd, this::add ); @@ -526,9 +458,6 @@ public String getDescription() { private void applyGlobalMandatoryParameters(DocWriteRequest request) { request.index(valueOrDefault(request.index(), globalIndex)); - if (Strings.isNullOrEmpty(globalType) == false && MapperService.SINGLE_MAPPING_NAME.equals(globalType) == false) { - request.defaultTypeIfNull(globalType); - } } private static String valueOrDefault(String value, String globalDefault) { diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/opensearch/action/bulk/BulkRequestBuilder.java index c2e372129e9ae..c58877e48a7eb 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkRequestBuilder.java @@ -46,7 +46,6 @@ import org.opensearch.common.Nullable; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; /** * A bulk request holds an ordered {@link IndexRequest}s and {@link DeleteRequest}s and allows to executes @@ -54,14 +53,6 @@ */ public class BulkRequestBuilder extends 
ActionRequestBuilder implements WriteRequestBuilder { - /** - * @deprecated use {@link #BulkRequestBuilder(OpenSearchClient, BulkAction, String)} instead - */ - @Deprecated - public BulkRequestBuilder(OpenSearchClient client, BulkAction action, @Nullable String globalIndex, @Nullable String globalType) { - super(client, action, new BulkRequest(globalIndex, globalType)); - } - public BulkRequestBuilder(OpenSearchClient client, BulkAction action, @Nullable String globalIndex) { super(client, action, new BulkRequest(globalIndex)); } @@ -128,29 +119,12 @@ public BulkRequestBuilder add(byte[] data, int from, int length, XContentType xC return this; } - /** - * Adds a framed data in binary format - * @deprecated use {@link #add(byte[], int, int, String, XContentType)} instead - */ - @Deprecated - public BulkRequestBuilder add( - byte[] data, - int from, - int length, - @Nullable String defaultIndex, - @Nullable String defaultType, - XContentType xContentType - ) throws Exception { - request.add(data, from, length, defaultIndex, defaultType, xContentType); - return this; - } - /** * Adds a framed data in binary format */ public BulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, XContentType xContentType) throws Exception { - request.add(data, from, length, defaultIndex, MapperService.SINGLE_MAPPING_NAME, xContentType); + request.add(data, from, length, defaultIndex, xContentType); return this; } diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/opensearch/action/bulk/BulkRequestParser.java index 488c667d000d8..042e104f70c7f 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkRequestParser.java @@ -40,7 +40,6 @@ import org.opensearch.common.ParseField; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; -import 
org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; @@ -49,12 +48,12 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.VersionType; import org.opensearch.index.seqno.SequenceNumbers; -import org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; @@ -65,8 +64,6 @@ */ public final class BulkRequestParser { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(BulkRequestParser.class); - private static final ParseField INDEX = new ParseField("_index"); private static final ParseField TYPE = new ParseField("_type"); private static final ParseField ID = new ParseField("_id"); @@ -81,14 +78,15 @@ public final class BulkRequestParser { private static final ParseField IF_PRIMARY_TERM = new ParseField("if_primary_term"); private static final ParseField REQUIRE_ALIAS = new ParseField(DocWriteRequest.REQUIRE_ALIAS); - private final boolean warnOnTypeUsage; + // TODO: Remove this parameter once the BulkMonitoring endpoint has been removed + private final boolean errorOnType; /** * Create a new parser. 
- * @param warnOnTypeUsage whether it warns upon types being explicitly specified + * @param errorOnType whether to allow _type information in the index line; used by BulkMonitoring */ - public BulkRequestParser(boolean warnOnTypeUsage) { - this.warnOnTypeUsage = warnOnTypeUsage; + public BulkRequestParser(boolean errorOnType) { + this.errorOnType = errorOnType; } private static int findNextMarker(byte marker, int from, BytesReference data) { @@ -136,45 +134,7 @@ public void parse( @Nullable Boolean defaultRequireAlias, boolean allowExplicitIndex, XContentType xContentType, - Consumer indexRequestConsumer, - Consumer updateRequestConsumer, - Consumer deleteRequestConsumer - ) throws IOException { - parse( - data, - defaultIndex, - null, - defaultRouting, - defaultFetchSourceContext, - defaultPipeline, - defaultRequireAlias, - allowExplicitIndex, - xContentType, - indexRequestConsumer, - updateRequestConsumer, - deleteRequestConsumer - ); - } - - /** - * Parse the provided {@code data} assuming the provided default values. Index requests - * will be passed to the {@code indexRequestConsumer}, update requests to the - * {@code updateRequestConsumer} and delete requests to the {@code deleteRequestConsumer}. - * @deprecated Use {@link #parse(BytesReference, String, String, FetchSourceContext, String, Boolean, boolean, XContentType, - * Consumer, Consumer, Consumer)} instead. 
- */ - @Deprecated - public void parse( - BytesReference data, - @Nullable String defaultIndex, - @Nullable String defaultType, - @Nullable String defaultRouting, - @Nullable FetchSourceContext defaultFetchSourceContext, - @Nullable String defaultPipeline, - @Nullable Boolean defaultRequireAlias, - boolean allowExplicitIndex, - XContentType xContentType, - Consumer indexRequestConsumer, + BiConsumer indexRequestConsumer, Consumer updateRequestConsumer, Consumer deleteRequestConsumer ) throws IOException { @@ -182,7 +142,6 @@ public void parse( int line = 0; int from = 0; byte marker = xContent.streamSeparator(); - boolean typesDeprecationLogged = false; // Bulk requests can contain a lot of repeated strings for the index, pipeline and routing parameters. This map is used to // deduplicate duplicate strings parsed for these parameters. While it does not prevent instantiating the duplicate strings, it // reduces their lifetime to the lifetime of this parse call instead of the lifetime of the full bulk request. 
@@ -231,7 +190,7 @@ public void parse( String action = parser.currentName(); String index = defaultIndex; - String type = defaultType; + String type = null; String id = null; String routing = defaultRouting; FetchSourceContext fetchSourceContext = defaultFetchSourceContext; @@ -255,14 +214,15 @@ public void parse( currentFieldName = parser.currentName(); } else if (token.isValue()) { if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) { - if (!allowExplicitIndex) { + if (allowExplicitIndex == false) { throw new IllegalArgumentException("explicit index in bulk is not allowed"); } index = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - if (warnOnTypeUsage && typesDeprecationLogged == false) { - deprecationLogger.deprecate("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); - typesDeprecationLogged = true; + if (errorOnType) { + throw new IllegalArgumentException( + "Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]" + ); } type = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { @@ -333,7 +293,8 @@ public void parse( if ("delete".equals(action)) { deleteRequestConsumer.accept( - new DeleteRequest(index, type, id).routing(routing) + new DeleteRequest(index).id(id) + .routing(routing) .version(version) .versionType(versionType) .setIfSeqNo(ifSeqNo) @@ -351,18 +312,21 @@ public void parse( if ("index".equals(action)) { if (opType == null) { indexRequestConsumer.accept( - new IndexRequest(index, type, id).routing(routing) + new IndexRequest(index).id(id) + .routing(routing) .version(version) .versionType(versionType) .setPipeline(pipeline) .setIfSeqNo(ifSeqNo) .setIfPrimaryTerm(ifPrimaryTerm) .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) - .setRequireAlias(requireAlias) 
+ .setRequireAlias(requireAlias), + type ); } else { indexRequestConsumer.accept( - new IndexRequest(index, type, id).routing(routing) + new IndexRequest(index).id(id) + .routing(routing) .version(version) .versionType(versionType) .create("create".equals(opType)) @@ -370,12 +334,14 @@ public void parse( .setIfSeqNo(ifSeqNo) .setIfPrimaryTerm(ifPrimaryTerm) .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) - .setRequireAlias(requireAlias) + .setRequireAlias(requireAlias), + type ); } } else if ("create".equals(action)) { indexRequestConsumer.accept( - new IndexRequest(index, type, id).routing(routing) + new IndexRequest(index).id(id) + .routing(routing) .version(version) .versionType(versionType) .create(true) @@ -383,7 +349,8 @@ public void parse( .setIfSeqNo(ifSeqNo) .setIfPrimaryTerm(ifPrimaryTerm) .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) - .setRequireAlias(requireAlias) + .setRequireAlias(requireAlias), + type ); } else if ("update".equals(action)) { if (version != Versions.MATCH_ANY || versionType != VersionType.INTERNAL) { @@ -391,7 +358,9 @@ public void parse( "Update requests do not support versioning. 
" + "Please use `if_seq_no` and `if_primary_term` instead" ); } - UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing) + UpdateRequest updateRequest = new UpdateRequest().index(index) + .id(id) + .routing(routing) .retryOnConflict(retryOnConflict) .setIfSeqNo(ifSeqNo) .setIfPrimaryTerm(ifPrimaryTerm) diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkResponse.java b/server/src/main/java/org/opensearch/action/bulk/BulkResponse.java index ef76913d9e1f3..751ad567c8639 100644 --- a/server/src/main/java/org/opensearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/opensearch/action/bulk/BulkResponse.java @@ -129,8 +129,6 @@ public String buildFailureMessage() { .append(i) .append("]: index [") .append(response.getIndex()) - .append("], type [") - .append(response.getType()) .append("], id [") .append(response.getId()) .append("], message [") diff --git a/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java b/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java index aa24c19bb3e95..c0eb29e4c112f 100644 --- a/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java +++ b/server/src/main/java/org/opensearch/action/bulk/MappingUpdatePerformer.java @@ -41,6 +41,6 @@ public interface MappingUpdatePerformer { /** * Update the mappings on the master. 
*/ - void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener); + void updateMappings(Mapping update, ShardId shardId, ActionListener listener); } diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportBulkAction.java index e3d1ba3f834aa..560fd1d8a45b3 100644 --- a/server/src/main/java/org/opensearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/opensearch/action/bulk/TransportBulkAction.java @@ -468,10 +468,7 @@ private boolean setResponseFailureIfIndexMatches( Exception e ) { if (index.equals(request.index())) { - responses.set( - idx, - new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.type(), request.id(), e)) - ); + responses.set(idx, new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.id(), e))); return true; } return false; @@ -552,7 +549,7 @@ protected void doRun() { prohibitCustomRoutingOnDataStream(docWriteRequest, metadata); IndexRequest indexRequest = (IndexRequest) docWriteRequest; final IndexMetadata indexMetadata = metadata.index(concreteIndex); - MappingMetadata mappingMd = indexMetadata.mappingOrDefault(); + MappingMetadata mappingMd = indexMetadata.mapping(); Version indexCreated = indexMetadata.getCreationVersion(); indexRequest.resolveRouting(metadata); indexRequest.process(indexCreated, mappingMd, concreteIndex.getName()); @@ -568,19 +565,14 @@ protected void doRun() { docWriteRequest.routing(metadata.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); // check if routing is required, if so, throw error if routing wasn't specified if (docWriteRequest.routing() == null && metadata.routingRequired(concreteIndex.getName())) { - throw new RoutingMissingException(concreteIndex.getName(), docWriteRequest.type(), docWriteRequest.id()); + throw new 
RoutingMissingException(concreteIndex.getName(), docWriteRequest.id()); } break; default: throw new AssertionError("request type not supported: [" + docWriteRequest.opType() + "]"); } } catch (OpenSearchParseException | IllegalArgumentException | RoutingMissingException e) { - BulkItemResponse.Failure failure = new BulkItemResponse.Failure( - concreteIndex.getName(), - docWriteRequest.type(), - docWriteRequest.id(), - e - ); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(concreteIndex.getName(), docWriteRequest.id(), e); BulkItemResponse bulkItemResponse = new BulkItemResponse(i, docWriteRequest.opType(), failure); responses.set(i, bulkItemResponse); // make sure the request gets never processed again @@ -659,7 +651,7 @@ public void onFailure(Exception e) { new BulkItemResponse( request.id(), docWriteRequest.opType(), - new BulkItemResponse.Failure(indexName, docWriteRequest.type(), docWriteRequest.id(), e) + new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e) ) ); } @@ -759,12 +751,7 @@ private boolean addFailureIfIndexIsUnavailable( } private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { - BulkItemResponse.Failure failure = new BulkItemResponse.Failure( - request.index(), - request.type(), - request.id(), - unavailableException - ); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.id(), unavailableException); BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, request.opType(), failure); responses.set(idx, bulkItemResponse); // make sure the request gets never processed again @@ -962,7 +949,6 @@ synchronized void markItemAsDropped(int slot) { indexRequest.opType(), new UpdateResponse( new ShardId(indexRequest.index(), IndexMetadata.INDEX_UUID_NA_VALUE, 0), - indexRequest.type(), id, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM, @@ -978,10 +964,9 @@ synchronized void markItemAsFailed(int slot, Exception e) { 
logger.debug( String.format( Locale.ROOT, - "failed to execute pipeline [%s] for document [%s/%s/%s]", + "failed to execute pipeline [%s] for document [%s/%s]", indexRequest.getPipeline(), indexRequest.index(), - indexRequest.type(), indexRequest.id() ), e @@ -992,12 +977,7 @@ synchronized void markItemAsFailed(int slot, Exception e) { // 2) Add a bulk item failure for this request // 3) Continue with the next request in the bulk. failedSlots.set(slot); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure( - indexRequest.index(), - indexRequest.type(), - indexRequest.id(), - e - ); + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.id(), e); itemResponses.add(new BulkItemResponse(slot, indexRequest.opType(), failure)); } diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java index 1ce4a346e5dc3..cc9f20b7aa256 100644 --- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java @@ -162,10 +162,10 @@ protected void dispatchedShardOperationOnPrimary( ActionListener> listener ) { ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext()); - performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, type, mappingListener) -> { + performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, mappingListener) -> { assert update != null; assert shardId != null; - mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), type, update, mappingListener); + mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), update, mappingListener); }, mappingUpdateListener -> observer.waitForNextChange(new ClusterStateObserver.Listener() { 
@Override public void onNewClusterState(ClusterState state) { @@ -311,7 +311,7 @@ static boolean executeBulkItemRequest( case UPDATED: IndexRequest indexRequest = updateResult.action(); IndexMetadata metadata = context.getPrimary().indexSettings().getIndexMetadata(); - MappingMetadata mappingMd = metadata.mappingOrDefault(); + MappingMetadata mappingMd = metadata.mapping(); indexRequest.process(metadata.getCreationVersion(), mappingMd, updateRequest.concreteIndex()); context.setRequestToExecute(indexRequest); break; @@ -340,7 +340,6 @@ static boolean executeBulkItemRequest( final DeleteRequest request = context.getRequestToExecute(); result = primary.applyDeleteOperationOnPrimary( version, - request.type(), request.id(), request.versionType(), request.ifSeqNo(), @@ -351,14 +350,7 @@ static boolean executeBulkItemRequest( result = primary.applyIndexOperationOnPrimary( version, request.versionType(), - new SourceToParse( - request.index(), - request.type(), - request.id(), - request.source(), - request.getContentType(), - request.routing() - ), + new SourceToParse(request.index(), request.id(), request.source(), request.getContentType(), request.routing()), request.ifSeqNo(), request.ifPrimaryTerm(), request.getAutoGeneratedTimestamp(), @@ -370,8 +362,8 @@ static boolean executeBulkItemRequest( try { primary.mapperService() .merge( - context.getRequestToExecute().type(), - new CompressedXContent(result.getRequiredMappingUpdate(), XContentType.JSON, ToXContent.EMPTY_PARAMS), + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(result.getRequiredMappingUpdate(), ToXContent.EMPTY_PARAMS), MapperService.MergeReason.MAPPING_UPDATE_PREFLIGHT ); } catch (Exception e) { @@ -380,37 +372,32 @@ static boolean executeBulkItemRequest( return true; } - mappingUpdater.updateMappings( - result.getRequiredMappingUpdate(), - primary.shardId(), - context.getRequestToExecute().type(), - new ActionListener() { - @Override - public void onResponse(Void v) { - 
context.markAsRequiringMappingUpdate(); - waitForMappingUpdate.accept(ActionListener.runAfter(new ActionListener() { - @Override - public void onResponse(Void v) { - assert context.requiresWaitingForMappingUpdate(); - context.resetForExecutionForRetry(); - } - - @Override - public void onFailure(Exception e) { - context.failOnMappingUpdate(e); - } - }, () -> itemDoneListener.onResponse(null))); - } + mappingUpdater.updateMappings(result.getRequiredMappingUpdate(), primary.shardId(), new ActionListener() { + @Override + public void onResponse(Void v) { + context.markAsRequiringMappingUpdate(); + waitForMappingUpdate.accept(ActionListener.runAfter(new ActionListener() { + @Override + public void onResponse(Void v) { + assert context.requiresWaitingForMappingUpdate(); + context.resetForExecutionForRetry(); + } - @Override - public void onFailure(Exception e) { - onComplete(exceptionToResult(e, primary, isDelete, version), context, updateResult); - // Requesting mapping update failed, so we don't have to wait for a cluster state update - assert context.isInitial(); - itemDoneListener.onResponse(null); - } + @Override + public void onFailure(Exception e) { + context.failOnMappingUpdate(e); + } + }, () -> itemDoneListener.onResponse(null))); } - ); + + @Override + public void onFailure(Exception e) { + onComplete(exceptionToResult(e, primary, isDelete, version), context, updateResult); + // Requesting mapping update failed, so we don't have to wait for a cluster state update + assert context.isInitial(); + itemDoneListener.onResponse(null); + } + }); return false; } else { onComplete(result, context, updateResult); @@ -485,7 +472,6 @@ static BulkItemResponse processUpdateResponse( updateResponse = new UpdateResponse( indexResponse.getShardInfo(), indexResponse.getShardId(), - indexResponse.getType(), indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), @@ -518,7 +504,6 @@ static BulkItemResponse processUpdateResponse( updateResponse = new 
UpdateResponse( deleteResponse.getShardInfo(), deleteResponse.getShardId(), - deleteResponse.getType(), deleteResponse.getId(), deleteResponse.getSeqNo(), deleteResponse.getPrimaryTerm(), @@ -608,7 +593,6 @@ private static Engine.Result performOpOnReplica( final ShardId shardId = replica.shardId(); final SourceToParse sourceToParse = new SourceToParse( shardId.getIndexName(), - indexRequest.type(), indexRequest.id(), indexRequest.source(), indexRequest.getContentType(), @@ -629,7 +613,6 @@ private static Engine.Result performOpOnReplica( primaryResponse.getSeqNo(), primaryResponse.getPrimaryTerm(), primaryResponse.getVersion(), - deleteRequest.type(), deleteRequest.id() ); break; diff --git a/server/src/main/java/org/opensearch/action/delete/DeleteRequest.java b/server/src/main/java/org/opensearch/action/delete/DeleteRequest.java index b6fda084e2958..c40933ba9c92e 100644 --- a/server/src/main/java/org/opensearch/action/delete/DeleteRequest.java +++ b/server/src/main/java/org/opensearch/action/delete/DeleteRequest.java @@ -34,6 +34,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.CompositeIndicesRequest; import org.opensearch.action.DocWriteRequest; @@ -57,7 +58,7 @@ * A request to delete a document from an index based on its type and id. Best created using * {@link org.opensearch.client.Requests#deleteRequest(String)}. *

        - * The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)} to + * The operation requires the {@link #index()} and {@link #id(String)} to * be set. * * @see DeleteResponse @@ -73,8 +74,6 @@ public class DeleteRequest extends ReplicatedWriteRequest private static final ShardId NO_SHARD_ID = null; - // Set to null initially so we can know to override in bulk requests that have a default type. - private String type; private String id; @Nullable private String routing; @@ -89,7 +88,10 @@ public DeleteRequest(StreamInput in) throws IOException { public DeleteRequest(@Nullable ShardId shardId, StreamInput in) throws IOException { super(shardId, in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]"; + } id = in.readString(); routing = in.readOptionalString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -106,7 +108,7 @@ public DeleteRequest() { } /** - * Constructs a new delete request against the specified index. The {@link #type(String)} and {@link #id(String)} + * Constructs a new delete request against the specified index. The {@link #id(String)} * must be set. */ public DeleteRequest(String index) { @@ -114,23 +116,6 @@ public DeleteRequest(String index) { this.index = index; } - /** - * Constructs a new delete request against the specified index with the type and id. - * - * @param index The index to get the document from - * @param type The type of the document - * @param id The id of the document - * - * @deprecated Types are in the process of being removed. Use {@link #DeleteRequest(String, String)} instead. - */ - @Deprecated - public DeleteRequest(String index, String type, String id) { - super(NO_SHARD_ID); - this.index = index; - this.type = type; - this.id = id; - } - /** * Constructs a new delete request against the specified index and id. 
* @@ -146,9 +131,6 @@ public DeleteRequest(String index, String id) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); - if (Strings.isEmpty(type())) { - validationException = addValidationError("type is missing", validationException); - } if (Strings.isEmpty(id)) { validationException = addValidationError("id is missing", validationException); } @@ -158,48 +140,6 @@ public ActionRequestValidationException validate() { return validationException; } - /** - * The type of the document to delete. - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public String type() { - if (type == null) { - return MapperService.SINGLE_MAPPING_NAME; - } - return type; - } - - /** - * Sets the type of the document to delete. - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public DeleteRequest type(String type) { - this.type = type; - return this; - } - - /** - * Set the default type supplied to a bulk - * request if this individual request's type is null - * or empty - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public DeleteRequest defaultTypeIfNull(String defaultType) { - if (Strings.isNullOrEmpty(type)) { - type = defaultType; - } - return this; - } - /** * The id of the document to delete. */ @@ -333,9 +273,9 @@ public void writeThin(StreamOutput out) throws IOException { } private void writeBody(StreamOutput out) throws IOException { - // A 7.x request allows null types but if deserialized in a 6.x node will cause nullpointer exceptions. - // So we use the type accessor method here to make the type non-null (will default it to "_doc"). 
- out.writeString(type()); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing()); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -349,7 +289,7 @@ private void writeBody(StreamOutput out) throws IOException { @Override public String toString() { - return "delete {[" + index + "][" + type() + "][" + id + "]}"; + return "delete {[" + index + "][" + id + "]}"; } @Override diff --git a/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java index f3d15cb9b0555..f6ee0f4a7b278 100644 --- a/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/delete/DeleteRequestBuilder.java @@ -53,14 +53,6 @@ public DeleteRequestBuilder(OpenSearchClient client, DeleteAction action, @Nulla super(client, action, new DeleteRequest(index)); } - /** - * Sets the type of the document to delete. - */ - public DeleteRequestBuilder setType(String type) { - request.type(type); - return this; - } - /** * Sets the id of the document to delete. */ diff --git a/server/src/main/java/org/opensearch/action/delete/DeleteResponse.java b/server/src/main/java/org/opensearch/action/delete/DeleteResponse.java index 21438313a7faa..6b000561ad282 100644 --- a/server/src/main/java/org/opensearch/action/delete/DeleteResponse.java +++ b/server/src/main/java/org/opensearch/action/delete/DeleteResponse.java @@ -58,12 +58,12 @@ public DeleteResponse(StreamInput in) throws IOException { super(in); } - public DeleteResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, boolean found) { - this(shardId, type, id, seqNo, primaryTerm, version, found ? 
Result.DELETED : Result.NOT_FOUND); + public DeleteResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, boolean found) { + this(shardId, id, seqNo, primaryTerm, version, found ? Result.DELETED : Result.NOT_FOUND); } - private DeleteResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, Result result) { - super(shardId, type, id, seqNo, primaryTerm, version, assertDeletedOrNotFound(result)); + private DeleteResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) { + super(shardId, id, seqNo, primaryTerm, version, assertDeletedOrNotFound(result)); } private static Result assertDeletedOrNotFound(Result result) { @@ -81,7 +81,6 @@ public String toString() { StringBuilder builder = new StringBuilder(); builder.append("DeleteResponse["); builder.append("index=").append(getIndex()); - builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); builder.append(",result=").append(getResult().getLowercase()); @@ -115,7 +114,7 @@ public static class Builder extends DocWriteResponse.Builder { @Override public DeleteResponse build() { - DeleteResponse deleteResponse = new DeleteResponse(shardId, type, id, seqNo, primaryTerm, version, result); + DeleteResponse deleteResponse = new DeleteResponse(shardId, id, seqNo, primaryTerm, version, result); deleteResponse.setForcedRefresh(forcedRefresh); if (shardInfo != null) { deleteResponse.setShardInfo(shardInfo); diff --git a/server/src/main/java/org/opensearch/action/explain/ExplainRequest.java b/server/src/main/java/org/opensearch/action/explain/ExplainRequest.java index d0098ea9b111a..1543c2c95b269 100644 --- a/server/src/main/java/org/opensearch/action/explain/ExplainRequest.java +++ b/server/src/main/java/org/opensearch/action/explain/ExplainRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.explain; +import org.opensearch.Version; import 
org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.ValidateActions; import org.opensearch.action.support.single.shard.SingleShardRequest; @@ -57,7 +58,6 @@ public class ExplainRequest extends SingleShardRequest implement private static final ParseField QUERY_FIELD = new ParseField("query"); - private String type = MapperService.SINGLE_MAPPING_NAME; private String id; private String routing; private String preference; @@ -71,16 +71,6 @@ public class ExplainRequest extends SingleShardRequest implement public ExplainRequest() {} - /** - * @deprecated Types are in the process of being removed. Use {@link ExplainRequest(String, String) instead.} - */ - @Deprecated - public ExplainRequest(String index, String type, String id) { - this.index = index; - this.type = type; - this.id = id; - } - public ExplainRequest(String index, String id) { this.index = index; this.id = id; @@ -88,7 +78,9 @@ public ExplainRequest(String index, String id) { ExplainRequest(StreamInput in) throws IOException { super(in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readString(); + } id = in.readString(); routing = in.readOptionalString(); preference = in.readOptionalString(); @@ -99,23 +91,6 @@ public ExplainRequest(String index, String id) { nowInMillis = in.readVLong(); } - /** - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String type() { - return type; - } - - /** - * @deprecated Types are in the process of being removed. 
- */ - @Deprecated - public ExplainRequest type(String type) { - this.type = type; - return this; - } - public String id() { return id; } @@ -196,9 +171,6 @@ public ExplainRequest filteringAlias(AliasFilter filteringAlias) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validateNonNullIndex(); - if (Strings.isEmpty(type)) { - validationException = addValidationError("type is missing", validationException); - } if (Strings.isEmpty(id)) { validationException = addValidationError("id is missing", validationException); } @@ -211,7 +183,9 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing); out.writeOptionalString(preference); diff --git a/server/src/main/java/org/opensearch/action/explain/ExplainRequestBuilder.java b/server/src/main/java/org/opensearch/action/explain/ExplainRequestBuilder.java index c161a6e639870..6839479079845 100644 --- a/server/src/main/java/org/opensearch/action/explain/ExplainRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/explain/ExplainRequestBuilder.java @@ -48,16 +48,8 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder PARSER = new ConstructingObjectParser<>( "explain", true, - (arg, exists) -> new ExplainResponse( - (String) arg[0], - (String) arg[1], - (String) arg[2], - exists, - (Explanation) arg[3], - (GetResult) arg[4] - ) + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3]) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); - PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE); 
PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>( "explanation", @@ -211,7 +195,6 @@ public static ExplainResponse fromXContent(XContentParser parser, boolean exists public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX.getPreferredName(), index); - builder.field(_TYPE.getPreferredName(), type); builder.field(_ID.getPreferredName(), id); builder.field(MATCHED.getPreferredName(), isMatch()); if (hasExplanation()) { @@ -253,7 +236,6 @@ public boolean equals(Object obj) { } ExplainResponse other = (ExplainResponse) obj; return index.equals(other.index) - && type.equals(other.type) && id.equals(other.id) && Objects.equals(explanation, other.explanation) && getResult.isExists() == other.getResult.isExists() @@ -263,6 +245,6 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(index, type, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); + return Objects.hash(index, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); } } diff --git a/server/src/main/java/org/opensearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/opensearch/action/explain/TransportExplainAction.java index 3d40cb30cdee5..9fb16eec7d36b 100644 --- a/server/src/main/java/org/opensearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/opensearch/action/explain/TransportExplainAction.java @@ -43,7 +43,6 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.lease.Releasables; @@ 
-51,7 +50,6 @@ import org.opensearch.index.engine.Engine; import org.opensearch.index.get.GetResult; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Uid; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.ShardId; @@ -116,7 +114,7 @@ protected void resolveRequest(ClusterState state, InternalRequest request) { request.request().filteringAlias(aliasFilter); // Fail fast on the node that received the request. if (request.request().routing() == null && state.getMetadata().routingRequired(request.concreteIndex())) { - throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id()); + throw new RoutingMissingException(request.concreteIndex(), request.request().id()); } } @@ -136,21 +134,15 @@ protected void asyncShardOperation(ExplainRequest request, ShardId shardId, Acti @Override protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException { - String[] types; - if (MapperService.SINGLE_MAPPING_NAME.equals(request.type())) { // typeless explain call - types = Strings.EMPTY_ARRAY; - } else { - types = new String[] { request.type() }; - } - ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, types, request.nowInMillis, request.filteringAlias()); + ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, request.nowInMillis, request.filteringAlias()); SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); Engine.GetResult result = null; try { // No need to check the type, IndexShard#get does it for us Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(request.id())); - result = context.indexShard().get(new Engine.Get(false, false, request.type(), request.id(), uidTerm)); + result = context.indexShard().get(new Engine.Get(false, false, request.id(), uidTerm)); if 
(!result.exists()) { - return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), false); + return new ExplainResponse(shardId.getIndexName(), request.id(), false); } context.parsedQuery(context.getQueryShardContext().toQuery(request.query())); context.preProcess(true); @@ -166,10 +158,10 @@ protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId // doc isn't deleted between the initial get and this call. GetResult getResult = context.indexShard() .getService() - .get(result, request.id(), request.type(), request.storedFields(), request.fetchSourceContext()); - return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult); + .get(result, request.id(), request.storedFields(), request.fetchSourceContext()); + return new ExplainResponse(shardId.getIndexName(), request.id(), true, explanation, getResult); } else { - return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation); + return new ExplainResponse(shardId.getIndexName(), request.id(), true, explanation); } } catch (IOException e) { throw new OpenSearchException("Could not explain", e); diff --git a/server/src/main/java/org/opensearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/opensearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index df958d11fa725..a4807eff1acb4 100644 --- a/server/src/main/java/org/opensearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/opensearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -195,7 +195,7 @@ private boolean canMatchShard(FieldCapabilitiesIndexRequest req) throws IOExcept return true; } assert req.nowInMillis() != 0L; - ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), null, req.nowInMillis(), AliasFilter.EMPTY); + ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), 
req.nowInMillis(), AliasFilter.EMPTY); searchRequest.source(new SearchSourceBuilder().query(req.indexFilter())); return searchService.canMatch(searchRequest).canMatch(); } diff --git a/server/src/main/java/org/opensearch/action/get/GetRequest.java b/server/src/main/java/org/opensearch/action/get/GetRequest.java index 2796a8e9e47d7..9badf2db92f67 100644 --- a/server/src/main/java/org/opensearch/action/get/GetRequest.java +++ b/server/src/main/java/org/opensearch/action/get/GetRequest.java @@ -33,11 +33,11 @@ package org.opensearch.action.get; import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.RealtimeRequest; import org.opensearch.action.ValidateActions; import org.opensearch.action.support.single.shard.SingleShardRequest; -import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -54,7 +54,7 @@ * A request to get a document (its source) from an index based on its id. Best created using * {@link org.opensearch.client.Requests#getRequest(String)}. *

        - * The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)} + * The operation requires the {@link #index()} and {@link #id(String)} * to be set. * * @see GetResponse @@ -63,7 +63,6 @@ */ public class GetRequest extends SingleShardRequest implements RealtimeRequest { - private String type; private String id; private String routing; private String preference; @@ -79,13 +78,13 @@ public class GetRequest extends SingleShardRequest implements Realti private VersionType versionType = VersionType.INTERNAL; private long version = Versions.MATCH_ANY; - public GetRequest() { - type = MapperService.SINGLE_MAPPING_NAME; - } + public GetRequest() {} GetRequest(StreamInput in) throws IOException { super(in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readString(); + } id = in.readString(); routing = in.readOptionalString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -106,22 +105,6 @@ public GetRequest() { */ public GetRequest(String index) { super(index); - this.type = MapperService.SINGLE_MAPPING_NAME; - } - - /** - * Constructs a new get request against the specified index with the type and id. - * - * @param index The index to get the document from - * @param type The type of the document - * @param id The id of the document - * @deprecated Types are in the process of being removed, use {@link GetRequest(String, String)} instead. 
- */ - @Deprecated - public GetRequest(String index, String type, String id) { - super(index); - this.type = type; - this.id = id; } /** @@ -133,15 +116,11 @@ public GetRequest(String index, String type, String id) { public GetRequest(String index, String id) { super(index); this.id = id; - this.type = MapperService.SINGLE_MAPPING_NAME; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validateNonNullIndex(); - if (Strings.isEmpty(type)) { - validationException = addValidationError("type is missing", validationException); - } if (Strings.isEmpty(id)) { validationException = addValidationError("id is missing", validationException); } @@ -154,19 +133,6 @@ public ActionRequestValidationException validate() { return validationException; } - /** - * Sets the type of the document to fetch. - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public GetRequest type(@Nullable String type) { - if (type == null) { - type = MapperService.SINGLE_MAPPING_NAME; - } - this.type = type; - return this; - } - /** * Sets the id of the document to fetch. */ @@ -194,14 +160,6 @@ public GetRequest preference(String preference) { return this; } - /** - * @deprecated Types are in the process of being removed. 
- */ - @Deprecated - public String type() { - return type; - } - public String id() { return id; } @@ -295,7 +253,9 @@ public VersionType versionType() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -313,7 +273,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { - return "get [" + index + "][" + type + "][" + id + "]: routing [" + routing + "]"; + return "get [" + index + "][" + id + "]: routing [" + routing + "]"; } } diff --git a/server/src/main/java/org/opensearch/action/get/GetRequestBuilder.java b/server/src/main/java/org/opensearch/action/get/GetRequestBuilder.java index e47965595be2d..492a88b9d3821 100644 --- a/server/src/main/java/org/opensearch/action/get/GetRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/get/GetRequestBuilder.java @@ -52,15 +52,6 @@ public GetRequestBuilder(OpenSearchClient client, GetAction action, @Nullable St super(client, action, new GetRequest(index)); } - /** - * Sets the type of the document to fetch. If set to {@code null}, will use just the id to fetch the - * first document matching it. - */ - public GetRequestBuilder setType(@Nullable String type) { - request.type(type); - return this; - } - /** * Sets the id of the document to fetch. */ diff --git a/server/src/main/java/org/opensearch/action/get/GetResponse.java b/server/src/main/java/org/opensearch/action/get/GetResponse.java index b10057ed282b5..a15607d696195 100644 --- a/server/src/main/java/org/opensearch/action/get/GetResponse.java +++ b/server/src/main/java/org/opensearch/action/get/GetResponse.java @@ -84,13 +84,6 @@ public String getIndex() { return getResult.getIndex(); } - /** - * The type of the document. 
- */ - public String getType() { - return getResult.getType(); - } - /** * The id of the document. */ @@ -209,10 +202,10 @@ public static GetResponse fromXContent(XContentParser parser) throws IOException // At this stage we ensure that we parsed enough information to return // a valid GetResponse instance. If it's not the case, we throw an // exception so that callers know it and can handle it correctly. - if (getResult.getIndex() == null && getResult.getType() == null && getResult.getId() == null) { + if (getResult.getIndex() == null && getResult.getId() == null) { throw new ParsingException( parser.getTokenLocation(), - String.format(Locale.ROOT, "Missing required fields [%s,%s,%s]", GetResult._INDEX, GetResult._TYPE, GetResult._ID) + String.format(Locale.ROOT, "Missing required fields [%s,%s]", GetResult._INDEX, GetResult._ID) ); } return new GetResponse(getResult); diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetItemResponse.java b/server/src/main/java/org/opensearch/action/get/MultiGetItemResponse.java index 4308a9223919b..1ff684fcc5872 100644 --- a/server/src/main/java/org/opensearch/action/get/MultiGetItemResponse.java +++ b/server/src/main/java/org/opensearch/action/get/MultiGetItemResponse.java @@ -71,16 +71,6 @@ public String getIndex() { return response.getIndex(); } - /** - * The type of the document. - */ - public String getType() { - if (failure != null) { - return failure.getType(); - } - return response.getType(); - } - /** * The id of the document. 
*/ diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java index 220659cfd894e..974799dd7bf4c 100644 --- a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java +++ b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java @@ -34,6 +34,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.CompositeIndicesRequest; @@ -54,6 +55,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParser.Token; import org.opensearch.index.VersionType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -73,7 +75,6 @@ public class MultiGetRequest extends ActionRequest private static final ParseField DOCS = new ParseField("docs"); private static final ParseField INDEX = new ParseField("_index"); - private static final ParseField TYPE = new ParseField("_type"); private static final ParseField ID = new ParseField("_id"); private static final ParseField ROUTING = new ParseField("routing"); private static final ParseField VERSION = new ParseField("version"); @@ -88,7 +89,6 @@ public class MultiGetRequest extends ActionRequest public static class Item implements Writeable, IndicesRequest, ToXContentObject { private String index; - private String type; private String id; private String routing; private String[] storedFields; @@ -102,7 +102,9 @@ public Item() { public Item(StreamInput in) throws IOException { index = in.readString(); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readOptionalString(); + } id = in.readString(); routing = in.readOptionalString(); if 
(in.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -115,22 +117,6 @@ public Item(StreamInput in) throws IOException { fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new); } - /** - * Constructs a single get item. - * - * @param index The index name - * @param type The type (can be null) - * @param id The id - * - * @deprecated Types are in the process of being removed, use {@link Item(String, String) instead}. - */ - @Deprecated - public Item(String index, @Nullable String type, String id) { - this.index = index; - this.type = type; - this.id = id; - } - public Item(String index, String id) { this.index = index; this.id = id; @@ -155,10 +141,6 @@ public Item index(String index) { return this; } - public String type() { - return this.type; - } - public String id() { return this.id; } @@ -217,7 +199,9 @@ public Item fetchSourceContext(FetchSourceContext fetchSourceContext) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); - out.writeOptionalString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -234,7 +218,6 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(INDEX.getPreferredName(), index); - builder.field(TYPE.getPreferredName(), type); builder.field(ID.getPreferredName(), id); builder.field(ROUTING.getPreferredName(), routing); builder.field(STORED_FIELDS.getPreferredName(), storedFields); @@ -259,7 +242,6 @@ public boolean equals(Object o) { if (!id.equals(item.id)) return false; if (!index.equals(item.index)) return false; if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false; - if (type != null ? 
!type.equals(item.type) : item.type != null) return false; if (versionType != item.versionType) return false; return true; @@ -268,7 +250,6 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = index.hashCode(); - result = 31 * result + (type != null ? type.hashCode() : 0); result = 31 * result + id.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); result = 31 * result + (storedFields != null ? Arrays.hashCode(storedFields) : 0); @@ -308,16 +289,6 @@ public MultiGetRequest add(Item item) { return this; } - /** - * @deprecated Types are in the process of being removed, use - * {@link MultiGetRequest#add(String, String)} instead. - */ - @Deprecated - public MultiGetRequest add(String index, @Nullable String type, String id) { - items.add(new Item(index, type, id)); - return this; - } - public MultiGetRequest add(String index, String id) { items.add(new Item(index, id)); return this; @@ -377,7 +348,6 @@ public MultiGetRequest refresh(boolean refresh) { public MultiGetRequest add( @Nullable String defaultIndex, - @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, @@ -395,18 +365,9 @@ public MultiGetRequest add( currentFieldName = parser.currentName(); } else if (token == Token.START_ARRAY) { if ("docs".equals(currentFieldName)) { - parseDocuments( - parser, - this.items, - defaultIndex, - defaultType, - defaultFields, - defaultFetchSource, - defaultRouting, - allowExplicitIndex - ); + parseDocuments(parser, this.items, defaultIndex, defaultFields, defaultFetchSource, defaultRouting, allowExplicitIndex); } else if ("ids".equals(currentFieldName)) { - parseIds(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting); + parseIds(parser, this.items, defaultIndex, defaultFields, defaultFetchSource, defaultRouting); } else { final String message = String.format( Locale.ROOT, @@ 
-434,7 +395,6 @@ private static void parseDocuments( XContentParser parser, List items, @Nullable String defaultIndex, - @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, @@ -447,7 +407,6 @@ private static void parseDocuments( throw new IllegalArgumentException("docs array element should include an object"); } String index = defaultIndex; - String type = defaultType; String id = null; String routing = defaultRouting; List storedFields = null; @@ -465,8 +424,6 @@ private static void parseDocuments( throw new IllegalArgumentException("explicit index in multi get is not allowed"); } index = parser.text(); - } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - type = parser.text(); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { id = parser.text(); } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { @@ -565,7 +522,7 @@ private static void parseDocuments( aFields = defaultFields; } items.add( - new Item(index, type, id).routing(routing) + new Item(index, id).routing(routing) .storedFields(aFields) .version(version) .versionType(versionType) @@ -578,7 +535,6 @@ public static void parseIds( XContentParser parser, List items, @Nullable String defaultIndex, - @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting @@ -589,7 +545,7 @@ public static void parseIds( throw new IllegalArgumentException("ids array element should only contain ids"); } items.add( - new Item(defaultIndex, defaultType, parser.text()).storedFields(defaultFields) + new Item(defaultIndex, parser.text()).storedFields(defaultFields) .fetchSourceContext(defaultFetchSource) .routing(defaultRouting) ); diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetRequestBuilder.java 
b/server/src/main/java/org/opensearch/action/get/MultiGetRequestBuilder.java index a068e4c66e5fa..56ac6cbd1b8c9 100644 --- a/server/src/main/java/org/opensearch/action/get/MultiGetRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/get/MultiGetRequestBuilder.java @@ -34,7 +34,6 @@ import org.opensearch.action.ActionRequestBuilder; import org.opensearch.client.OpenSearchClient; -import org.opensearch.common.Nullable; /** * A multi get document action request builder. @@ -45,21 +44,21 @@ public MultiGetRequestBuilder(OpenSearchClient client, MultiGetAction action) { super(client, action, new MultiGetRequest()); } - public MultiGetRequestBuilder add(String index, @Nullable String type, String id) { - request.add(index, type, id); + public MultiGetRequestBuilder add(String index, String id) { + request.add(index, id); return this; } - public MultiGetRequestBuilder add(String index, @Nullable String type, Iterable ids) { + public MultiGetRequestBuilder add(String index, Iterable ids) { for (String id : ids) { - request.add(index, type, id); + request.add(index, id); } return this; } - public MultiGetRequestBuilder add(String index, @Nullable String type, String... ids) { + public MultiGetRequestBuilder add(String index, String... 
ids) { for (String id : ids) { - request.add(index, type, id); + request.add(index, id); } return this; } diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetResponse.java b/server/src/main/java/org/opensearch/action/get/MultiGetResponse.java index a5cf07c32b3e9..ca6249861dd50 100644 --- a/server/src/main/java/org/opensearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/opensearch/action/get/MultiGetResponse.java @@ -33,6 +33,7 @@ package org.opensearch.action.get; import org.opensearch.OpenSearchException; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; @@ -43,6 +44,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentParser.Token; import org.opensearch.index.get.GetResult; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; @@ -53,7 +55,6 @@ public class MultiGetResponse extends ActionResponse implements Iterable, ToXContentObject { private static final ParseField INDEX = new ParseField("_index"); - private static final ParseField TYPE = new ParseField("_type"); private static final ParseField ID = new ParseField("_id"); private static final ParseField ERROR = new ParseField("error"); private static final ParseField DOCS = new ParseField("docs"); @@ -64,20 +65,20 @@ public class MultiGetResponse extends ActionResponse implements Iterable new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId, item.type(), item.id()), - e - ); - response.add(request.locations.get(i), new MultiGetResponse.Failure(request.index(), item.type(), item.id(), e)); + logger.debug(() -> new ParameterizedMessage("{} failed to execute multi_get for [{}]", shardId, item.id()), e); + response.add(request.locations.get(i), new MultiGetResponse.Failure(request.index(), item.id(), e)); } } } diff --git 
a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java index ccbe48ab40a51..7bf6b876fa652 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java @@ -47,7 +47,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.cluster.metadata.Metadata; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; @@ -77,7 +76,7 @@ * Index request to index a typed JSON document into a specific index and make it searchable. Best * created using {@link org.opensearch.client.Requests#indexRequest(String)}. * - * The index requires the {@link #index()}, {@link #type(String)}, {@link #id(String)} and + * The index requires the {@link #index()}, {@link #id(String)} and * {@link #source(byte[], XContentType)} to be set. * * The source (content to index) can be set in its bytes form using ({@link #source(byte[], XContentType)}), @@ -103,8 +102,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement private static final ShardId NO_SHARD_ID = null; - // Set to null initially so we can know to override in bulk requests that have a default type. 
- private String type; private String id; @Nullable private String routing; @@ -143,7 +140,10 @@ public IndexRequest(StreamInput in) throws IOException { public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOException { super(shardId, in); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readOptionalString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]"; + } id = in.readOptionalString(); routing = in.readOptionalString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -181,7 +181,7 @@ public IndexRequest() { } /** - * Constructs a new index request against the specific index. The {@link #type(String)} + * Constructs a new index request against the specific index. The * {@link #source(byte[], XContentType)} must be set. */ public IndexRequest(String index) { @@ -189,44 +189,12 @@ public IndexRequest(String index) { this.index = index; } - /** - * Constructs a new index request against the specific index and type. The - * {@link #source(byte[], XContentType)} must be set. - * @deprecated Types are in the process of being removed. Use {@link #IndexRequest(String)} instead. - */ - @Deprecated - public IndexRequest(String index, String type) { - super(NO_SHARD_ID); - this.index = index; - this.type = type; - } - - /** - * Constructs a new index request against the index, type, id and using the source. - * - * @param index The index to index into - * @param type The type to index into - * @param id The id of document - * - * @deprecated Types are in the process of being removed. Use {@link #IndexRequest(String)} with {@link #id(String)} instead. 
- */ - @Deprecated - public IndexRequest(String index, String type, String id) { - super(NO_SHARD_ID); - this.index = index; - this.type = type; - this.id = id; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); if (source == null) { validationException = addValidationError("source is missing", validationException); } - if (Strings.isEmpty(type())) { - validationException = addValidationError("type is missing", validationException); - } if (contentType == null) { validationException = addValidationError("content type is missing", validationException); } @@ -298,45 +266,6 @@ public XContentType getContentType() { return contentType; } - /** - * The type of the indexed document. - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public String type() { - if (type == null) { - return MapperService.SINGLE_MAPPING_NAME; - } - return type; - } - - /** - * Sets the type of the indexed document. - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public IndexRequest type(String type) { - this.type = type; - return this; - } - - /** - * Set the default type supplied to a bulk - * request if this individual request's type is null - * or empty - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public IndexRequest defaultTypeIfNull(String defaultType) { - if (Strings.isNullOrEmpty(type)) { - type = defaultType; - } - return this; - } - /** * The id of the indexed document. If not set, will be automatically generated. 
*/ @@ -686,8 +615,8 @@ public VersionType versionType() { public void process(Version indexCreatedVersion, @Nullable MappingMetadata mappingMd, String concreteIndex) { if (mappingMd != null) { // might as well check for routing here - if (mappingMd.routing().required() && routing == null) { - throw new RoutingMissingException(concreteIndex, type(), id); + if (mappingMd.routingRequired() && routing == null) { + throw new RoutingMissingException(concreteIndex, id); } } @@ -733,9 +662,9 @@ public void writeThin(StreamOutput out) throws IOException { } private void writeBody(StreamOutput out) throws IOException { - // A 7.x request allows null types but if deserialized in a 6.x node will cause nullpointer exceptions. - // So we use the type accessor method here to make the type non-null (will default it to "_doc"). - out.writeOptionalString(type()); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeOptionalString(id); out.writeOptionalString(routing); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -782,7 +711,7 @@ public String toString() { } catch (Exception e) { // ignore } - return "index {[" + index + "][" + type() + "][" + id + "], source[" + sSource + "]}"; + return "index {[" + index + "][" + id + "], source[" + sSource + "]}"; } @Override diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java index ff13239717cda..cef5ef0f85c62 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequestBuilder.java @@ -59,14 +59,6 @@ public IndexRequestBuilder(OpenSearchClient client, IndexAction action, @Nullabl super(client, action, new IndexRequest(index)); } - /** - * Sets the type to index the document to. 
- */ - public IndexRequestBuilder setType(String type) { - request.type(type); - return this; - } - /** * Sets the id to index the document under. Optional, and if not set, one will be automatically * generated. diff --git a/server/src/main/java/org/opensearch/action/index/IndexResponse.java b/server/src/main/java/org/opensearch/action/index/IndexResponse.java index 9a25cbee43da2..be0826ce84f96 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexResponse.java +++ b/server/src/main/java/org/opensearch/action/index/IndexResponse.java @@ -59,12 +59,12 @@ public IndexResponse(StreamInput in) throws IOException { super(in); } - public IndexResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, boolean created) { - this(shardId, type, id, seqNo, primaryTerm, version, created ? Result.CREATED : Result.UPDATED); + public IndexResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, boolean created) { + this(shardId, id, seqNo, primaryTerm, version, created ? 
Result.CREATED : Result.UPDATED); } - private IndexResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, Result result) { - super(shardId, type, id, seqNo, primaryTerm, version, assertCreatedOrUpdated(result)); + private IndexResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) { + super(shardId, id, seqNo, primaryTerm, version, assertCreatedOrUpdated(result)); } private static Result assertCreatedOrUpdated(Result result) { @@ -82,7 +82,6 @@ public String toString() { StringBuilder builder = new StringBuilder(); builder.append("IndexResponse["); builder.append("index=").append(getIndex()); - builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); builder.append(",result=").append(getResult().getLowercase()); @@ -117,7 +116,7 @@ public static void parseXContentFields(XContentParser parser, Builder context) t public static class Builder extends DocWriteResponse.Builder { @Override public IndexResponse build() { - IndexResponse indexResponse = new IndexResponse(shardId, type, id, seqNo, primaryTerm, version, result); + IndexResponse indexResponse = new IndexResponse(shardId, id, seqNo, primaryTerm, version, result); indexResponse.setForcedRefresh(forcedRefresh); if (shardInfo != null) { indexResponse.setShardInfo(shardInfo); diff --git a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java index e8f7b901f6e9c..6223f25488d88 100644 --- a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java @@ -200,7 +200,6 @@ private static List parseDocs(Map config) { "[types removal] specifying _type in pipeline simulation requests is deprecated" ); } - String type = 
ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.TYPE.getFieldName(), "_doc"); String id = ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.ID.getFieldName(), "_id"); String routing = ConfigurationUtils.readOptionalStringOrIntProperty(null, null, dataMap, Metadata.ROUTING.getFieldName()); Long version = null; @@ -213,7 +212,7 @@ private static List parseDocs(Map config) { ConfigurationUtils.readStringProperty(null, null, dataMap, Metadata.VERSION_TYPE.getFieldName()) ); } - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, version, versionType, document); + IngestDocument ingestDocument = new IngestDocument(index, id, routing, version, versionType, document); if (dataMap.containsKey(Metadata.IF_SEQ_NO.getFieldName())) { Long ifSeqNo = (Long) ConfigurationUtils.readObject(null, null, dataMap, Metadata.IF_SEQ_NO.getFieldName()); ingestDocument.setFieldValue(Metadata.IF_SEQ_NO.getFieldName(), ifSeqNo); diff --git a/server/src/main/java/org/opensearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/opensearch/action/ingest/WriteableIngestDocument.java index 7b451b23d0a97..2f8c65486c22f 100644 --- a/server/src/main/java/org/opensearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/opensearch/action/ingest/WriteableIngestDocument.java @@ -66,24 +66,22 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { a -> { HashMap sourceAndMetadata = new HashMap<>(); sourceAndMetadata.put(Metadata.INDEX.getFieldName(), a[0]); - sourceAndMetadata.put(Metadata.TYPE.getFieldName(), a[1]); - sourceAndMetadata.put(Metadata.ID.getFieldName(), a[2]); + sourceAndMetadata.put(Metadata.ID.getFieldName(), a[1]); + if (a[2] != null) { + sourceAndMetadata.put(Metadata.ROUTING.getFieldName(), a[2]); + } if (a[3] != null) { - sourceAndMetadata.put(Metadata.ROUTING.getFieldName(), a[3]); + sourceAndMetadata.put(Metadata.VERSION.getFieldName(), 
a[3]); } if (a[4] != null) { - sourceAndMetadata.put(Metadata.VERSION.getFieldName(), a[4]); - } - if (a[5] != null) { - sourceAndMetadata.put(Metadata.VERSION_TYPE.getFieldName(), a[5]); + sourceAndMetadata.put(Metadata.VERSION_TYPE.getFieldName(), a[4]); } - sourceAndMetadata.putAll((Map) a[6]); - return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, (Map) a[7])); + sourceAndMetadata.putAll((Map) a[5]); + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, (Map) a[6])); } ); static { INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(Metadata.INDEX.getFieldName())); - INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(Metadata.TYPE.getFieldName())); INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(Metadata.ID.getFieldName())); INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(Metadata.ROUTING.getFieldName())); INGEST_DOC_PARSER.declareLong(optionalConstructorArg(), new ParseField(Metadata.VERSION.getFieldName())); diff --git a/server/src/main/java/org/opensearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/opensearch/action/search/MultiSearchRequest.java index 2cff7cb5d270f..c45b6477d30f0 100644 --- a/server/src/main/java/org/opensearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/opensearch/action/search/MultiSearchRequest.java @@ -192,7 +192,6 @@ public static void readMultiLineFormat( CheckedBiConsumer consumer, String[] indices, IndicesOptions indicesOptions, - String[] types, String routing, String searchType, Boolean ccsMinimizeRoundtrips, @@ -225,9 +224,6 @@ public static void readMultiLineFormat( if (indicesOptions != null) { searchRequest.indicesOptions(indicesOptions); } - if (types != null && types.length > 0) { - searchRequest.types(types); - } if (routing != null) { searchRequest.routing(routing); } @@ -256,8 +252,6 @@ public static void readMultiLineFormat( throw new IllegalArgumentException("explicit 
index in multi search is not allowed"); } searchRequest.indices(nodeStringArrayValue(value)); - } else if ("type".equals(entry.getKey()) || "types".equals(entry.getKey())) { - searchRequest.types(nodeStringArrayValue(value)); } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) { searchRequest.searchType(nodeStringValue(value, null)); } else if ("ccs_minimize_roundtrips".equals(entry.getKey()) || "ccsMinimizeRoundtrips".equals(entry.getKey())) { @@ -359,9 +353,6 @@ public static void writeSearchRequestParams(SearchRequest request, XContentBuild xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable()); xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices()); } - if (request.types() != null) { - xContentBuilder.field("types", request.types()); - } if (request.searchType() != null) { xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT)); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequest.java b/server/src/main/java/org/opensearch/action/search/SearchRequest.java index b753ec059b7af..c4b97c35bc405 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequest.java @@ -106,8 +106,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla private Integer preFilterShardSize; - private String[] types = Strings.EMPTY_ARRAY; - private boolean ccsMinimizeRoundtrips = true; public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosedIgnoreThrottled(); @@ -204,7 +202,6 @@ private SearchRequest( this.scroll = searchRequest.scroll; this.searchType = searchRequest.searchType; this.source = searchRequest.source; - this.types = searchRequest.types; this.localClusterAlias = localClusterAlias; this.absoluteStartMillis = absoluteStartMillis; this.finalReduce = 
finalReduce; @@ -225,7 +222,15 @@ public SearchRequest(StreamInput in) throws IOException { preference = in.readOptionalString(); scroll = in.readOptionalWriteable(Scroll::new); source = in.readOptionalWriteable(SearchSourceBuilder::new); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + // types no longer relevant so ignore + String[] types = in.readStringArray(); + if (types.length > 0) { + throw new IllegalStateException( + "types are no longer supported in search requests but found [" + Arrays.toString(types) + "]" + ); + } + } indicesOptions = IndicesOptions.readIndicesOptions(in); requestCache = in.readOptionalBoolean(); batchedReduceSize = in.readVInt(); @@ -262,7 +267,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(preference); out.writeOptionalWriteable(scroll); out.writeOptionalWriteable(source); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + // types not supported so send an empty array to previous versions + out.writeStringArray(Strings.EMPTY_ARRAY); + } indicesOptions.writeIndicesOptions(out); out.writeOptionalBoolean(requestCache); out.writeVInt(batchedReduceSize); @@ -408,35 +416,6 @@ public void setCcsMinimizeRoundtrips(boolean ccsMinimizeRoundtrips) { this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips; } - /** - * The document types to execute the search against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return types; - } - - /** - * The document types to execute the search against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public SearchRequest types(String... 
types) { - Objects.requireNonNull(types, "types must not be null"); - for (String type : types) { - Objects.requireNonNull(type, "type must not be null"); - } - this.types = types; - return this; - } - /** * A comma separated list of routing values to control the shards the search will be executed on. */ @@ -702,9 +681,6 @@ public final String buildDescription() { sb.append("indices["); Strings.arrayToDelimitedString(indices, ",", sb); sb.append("], "); - sb.append("types["); - Strings.arrayToDelimitedString(types, ",", sb); - sb.append("], "); sb.append("search_type[").append(searchType).append("], "); if (scroll != null) { sb.append("scroll[").append(scroll.keepAlive()).append("], "); @@ -733,7 +709,6 @@ public boolean equals(Object o) { && Objects.equals(source, that.source) && Objects.equals(requestCache, that.requestCache) && Objects.equals(scroll, that.scroll) - && Arrays.equals(types, that.types) && Objects.equals(batchedReduceSize, that.batchedReduceSize) && Objects.equals(maxConcurrentShardRequests, that.maxConcurrentShardRequests) && Objects.equals(preFilterShardSize, that.preFilterShardSize) @@ -755,7 +730,6 @@ public int hashCode() { source, requestCache, scroll, - Arrays.hashCode(types), indicesOptions, batchedReduceSize, maxConcurrentShardRequests, @@ -777,8 +751,6 @@ public String toString() { + Arrays.toString(indices) + ", indicesOptions=" + indicesOptions - + ", types=" - + Arrays.toString(types) + ", routing='" + routing + '\'' diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java index 758c72b5926e3..6def33f82b7bd 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java @@ -72,17 +72,6 @@ public SearchRequestBuilder setIndices(String... indices) { return this; } - /** - * The document types to execute the search against. 
Defaults to be executed against - * all types. - * @deprecated Types are going away, prefer filtering on a field. - */ - @Deprecated - public SearchRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - /** * The search type to execute, defaults to {@link SearchType#DEFAULT}. */ diff --git a/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java index 018464a5a0cd7..0b392caa3e588 100644 --- a/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/opensearch/action/support/master/info/ClusterInfoRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.support.master.info; +import org.opensearch.Version; import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.master.MasterNodeReadRequest; @@ -46,7 +47,6 @@ public abstract class ClusterInfoRequest { @@ -42,16 +41,16 @@ public MultiTermVectorsRequestBuilder(OpenSearchClient client, MultiTermVectorsA super(client, action, new MultiTermVectorsRequest()); } - public MultiTermVectorsRequestBuilder add(String index, @Nullable String type, Iterable ids) { + public MultiTermVectorsRequestBuilder add(String index, Iterable ids) { for (String id : ids) { - request.add(index, type, id); + request.add(index, id); } return this; } - public MultiTermVectorsRequestBuilder add(String index, @Nullable String type, String... ids) { + public MultiTermVectorsRequestBuilder add(String index, String... 
ids) { for (String id : ids) { - request.add(index, type, id); + request.add(index, id); } return this; } diff --git a/server/src/main/java/org/opensearch/action/termvectors/MultiTermVectorsResponse.java b/server/src/main/java/org/opensearch/action/termvectors/MultiTermVectorsResponse.java index f31aa15bf8167..599c2fa883dc7 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/MultiTermVectorsResponse.java +++ b/server/src/main/java/org/opensearch/action/termvectors/MultiTermVectorsResponse.java @@ -33,6 +33,7 @@ package org.opensearch.action.termvectors; import org.opensearch.OpenSearchException; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -51,20 +52,21 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable */ public static class Failure implements Writeable { private final String index; - private final String type; private final String id; private final Exception cause; - public Failure(String index, String type, String id, Exception cause) { + public Failure(String index, String id, Exception cause) { this.index = index; - this.type = type; this.id = id; this.cause = cause; } public Failure(StreamInput in) throws IOException { index = in.readString(); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + // ignore removed type from pre-2.0.0 versions + in.readOptionalString(); + } id = in.readString(); cause = in.readException(); } @@ -76,16 +78,6 @@ public String getIndex() { return this.index; } - /** - * The type of the action. - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String getType() { - return type; - } - /** * The id of the action. 
*/ @@ -103,7 +95,10 @@ public Exception getCause() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); - out.writeOptionalString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + // types no longer supported + out.writeOptionalString(null); + } out.writeString(id); out.writeException(cause); } @@ -138,7 +133,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); Failure failure = response.getFailure(); builder.field(Fields._INDEX, failure.getIndex()); - builder.field(Fields._TYPE, failure.getType()); builder.field(Fields._ID, failure.getId()); OpenSearchException.generateFailureXContent(builder, params, failure.getCause(), true); builder.endObject(); diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java index c9a98d09d717f..214d5f0d6d4fa 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java @@ -34,6 +34,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.RealtimeRequest; import org.opensearch.action.ValidateActions; @@ -45,7 +46,6 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentBuilder; @@ -53,7 +53,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.VersionType; -import 
org.opensearch.rest.action.document.RestTermVectorsAction; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; @@ -71,14 +71,11 @@ * Request returning the term vector (doc frequency, positions, offsets) for a * document. *

        - * Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are + * Note, the {@link #index()}, and {@link #id(String)} are * required. */ public class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TermVectorsRequest.class); - private static final ParseField INDEX = new ParseField("_index"); - private static final ParseField TYPE = new ParseField("_type"); private static final ParseField ID = new ParseField("_id"); private static final ParseField ROUTING = new ParseField("routing"); private static final ParseField VERSION = new ParseField("version"); @@ -91,8 +88,6 @@ public class TermVectorsRequest extends SingleShardRequest i private static final ParseField FILTER = new ParseField("filter"); private static final ParseField DOC = new ParseField("doc"); - private String type; - private String id; private BytesReference doc; @@ -176,7 +171,10 @@ public TermVectorsRequest() {} TermVectorsRequest(StreamInput in) throws IOException { super(in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + // types no longer supported; ignore for BWC + in.readString(); + } id = in.readString(); if (in.readBoolean()) { @@ -218,24 +216,20 @@ public TermVectorsRequest() {} /** * Constructs a new term vector request for a document that will be fetch - * from the provided index. Use {@link #type(String)} and - * {@link #id(String)} to specify the document to load. + * from the provided index. Use {@link #id(String)} to specify the document to load. */ - public TermVectorsRequest(String index, String type, String id) { + public TermVectorsRequest(String index, String id) { super(index); this.id = id; - this.type = type; } /** * Constructs a new term vector request for a document that will be fetch - * from the provided index. Use {@link #type(String)} and - * {@link #id(String)} to specify the document to load. 
+ * from the provided index. Use {@link #id(String)} to specify the document to load. */ public TermVectorsRequest(TermVectorsRequest other) { super(other.index()); this.id = other.id(); - this.type = other.type(); if (other.doc != null) { this.doc = new BytesArray(other.doc().toBytesRef(), true); this.xContentType = other.xContentType; @@ -258,7 +252,6 @@ public TermVectorsRequest(TermVectorsRequest other) { public TermVectorsRequest(MultiGetRequest.Item item) { super(item.index()); this.id = item.id(); - this.type = item.type(); this.selectedFields(item.storedFields()); this.routing(item.routing()); } @@ -267,21 +260,6 @@ public EnumSet getFlags() { return flagsEnum; } - /** - * Sets the type of document to get the term vector for. - */ - public TermVectorsRequest type(String type) { - this.type = type; - return this; - } - - /** - * Returns the type of document to get the term vector for. - */ - public String type() { - return type; - } - /** * Returns the id of document the term vector is requested for. 
*/ @@ -535,9 +513,6 @@ private void setFlag(Flag flag, boolean set) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validateNonNullIndex(); - if (type == null) { - validationException = ValidateActions.addValidationError("type is missing", validationException); - } if (id == null && doc == null) { validationException = ValidateActions.addValidationError("id or doc is missing", validationException); } @@ -547,7 +522,10 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + // types no longer supported; send "_doc" for bwc + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeBoolean(doc != null); @@ -631,9 +609,6 @@ public static void parseRequest(TermVectorsRequest termVectorsRequest, XContentP } else if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) { // the following is important for multi request parsing. 
termVectorsRequest.index = parser.text(); - } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - termVectorsRequest.type = parser.text(); - deprecationLogger.deprecate("termvectors_with_types", RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { if (termVectorsRequest.doc != null) { throw new OpenSearchParseException( diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequestBuilder.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequestBuilder.java index c985ebd43dbbe..7294db072ad38 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequestBuilder.java @@ -54,11 +54,10 @@ public TermVectorsRequestBuilder(OpenSearchClient client, TermVectorsAction acti /** * Constructs a new term vector request builder for a document that will be fetch - * from the provided index. Use {@code index}, {@code type} and - * {@code id} to specify the document to load. + * from the provided index. Use {@code index}, and {@code id} to specify the document to load. */ - public TermVectorsRequestBuilder(OpenSearchClient client, TermVectorsAction action, String index, String type, String id) { - super(client, action, new TermVectorsRequest(index, type, id)); + public TermVectorsRequestBuilder(OpenSearchClient client, TermVectorsAction action, String index, String id) { + super(client, action, new TermVectorsRequest(index, id)); } /** @@ -69,14 +68,6 @@ public TermVectorsRequestBuilder setIndex(String index) { return this; } - /** - * Sets the type of the document. - */ - public TermVectorsRequestBuilder setType(String type) { - request.type(type); - return this; - } - /** * Sets the id of the document. 
*/ diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsResponse.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsResponse.java index 17d269935574e..870609d526909 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsResponse.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsResponse.java @@ -40,6 +40,7 @@ import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; @@ -49,6 +50,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.Collections; @@ -77,7 +79,6 @@ private static class FieldStrings { public static final String END_OFFSET = "end_offset"; public static final String PAYLOAD = "payload"; public static final String _INDEX = "_index"; - public static final String _TYPE = "_type"; public static final String _ID = "_id"; public static final String _VERSION = "_version"; public static final String FOUND = "found"; @@ -89,7 +90,6 @@ private static class FieldStrings { private BytesReference termVectors; private BytesReference headerRef; private String index; - private String type; private String id; private long docVersion; private boolean exists = false; @@ -104,9 +104,8 @@ private static class FieldStrings { int[] currentEndOffset = new int[0]; BytesReference[] currentPayloads = new BytesReference[0]; - public TermVectorsResponse(String index, String type, String id) { + public TermVectorsResponse(String index, String id) { this.index = index; - this.type = type; this.id = id; } @@ -114,7 +113,10 @@ public 
TermVectorsResponse(String index, String type, String id) { TermVectorsResponse(StreamInput in) throws IOException { index = in.readString(); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + // ignore deprecated/removed type + in.readString(); + } id = in.readString(); docVersion = in.readVLong(); exists = in.readBoolean(); @@ -129,7 +131,10 @@ public TermVectorsResponse(String index, String type, String id) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + // send empty array to previous version since types are no longer supported + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeVLong(docVersion); final boolean docExists = isExists(); @@ -180,11 +185,9 @@ public int size() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { assert index != null; - assert type != null; assert id != null; builder.startObject(); builder.field(FieldStrings._INDEX, index); - builder.field(FieldStrings._TYPE, type); if (!isArtificial()) { builder.field(FieldStrings._ID, id); } @@ -420,10 +423,6 @@ public String getIndex() { return index; } - public String getType() { - return type; - } - public String getId() { return id; } diff --git a/server/src/main/java/org/opensearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/opensearch/action/termvectors/TransportMultiTermVectorsAction.java index 68bd89df0a397..127b31f329d09 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -92,7 +92,6 @@ protected void doExecute(Task task, final MultiTermVectorsRequest request, final null, new MultiTermVectorsResponse.Failure( termVectorsRequest.index(), - 
termVectorsRequest.type(), termVectorsRequest.id(), new IndexNotFoundException(termVectorsRequest.index()) ) @@ -108,9 +107,8 @@ protected void doExecute(Task task, final MultiTermVectorsRequest request, final null, new MultiTermVectorsResponse.Failure( concreteSingleIndex, - termVectorsRequest.type(), termVectorsRequest.id(), - new RoutingMissingException(concreteSingleIndex, termVectorsRequest.type(), termVectorsRequest.id()) + new RoutingMissingException(concreteSingleIndex, termVectorsRequest.id()) ) ) ); @@ -166,12 +164,7 @@ public void onFailure(Exception e) { shardRequest.locations.get(i), new MultiTermVectorsItemResponse( null, - new MultiTermVectorsResponse.Failure( - shardRequest.index(), - termVectorsRequest.type(), - termVectorsRequest.id(), - e - ) + new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorsRequest.id(), e) ) ); } diff --git a/server/src/main/java/org/opensearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/opensearch/action/termvectors/TransportShardMultiTermsVectorAction.java index ded3bffa2bc75..511b68965ebdf 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -117,16 +117,15 @@ protected MultiTermVectorsShardResponse shardOperation(MultiTermVectorsShardRequ } else { logger.debug( () -> new ParameterizedMessage( - "{} failed to execute multi term vectors for [{}]/[{}]", + "{} failed to execute multi term vectors for [{}]", shardId, - termVectorsRequest.type(), termVectorsRequest.id() ), e ); response.add( request.locations.get(i), - new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e) + new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.id(), e) ); } } diff --git 
a/server/src/main/java/org/opensearch/action/termvectors/TransportTermVectorsAction.java b/server/src/main/java/org/opensearch/action/termvectors/TransportTermVectorsAction.java index 73471ed76e35c..3cfd9cf7da7c5 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TransportTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TransportTermVectorsAction.java @@ -107,7 +107,7 @@ protected void resolveRequest(ClusterState state, InternalRequest request) { request.request().routing(state.metadata().resolveIndexRouting(request.request().routing(), request.request().index())); // Fail fast on the node that received the request. if (request.request().routing() == null && state.getMetadata().routingRequired(request.concreteIndex())) { - throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id()); + throw new RoutingMissingException(request.concreteIndex(), request.request().id()); } } diff --git a/server/src/main/java/org/opensearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/opensearch/action/update/TransportUpdateAction.java index 475f16cb96ae0..387c0d24ed4df 100644 --- a/server/src/main/java/org/opensearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/opensearch/action/update/TransportUpdateAction.java @@ -142,7 +142,7 @@ public static void resolveAndValidateRouting(Metadata metadata, String concreteI request.routing((metadata.resolveWriteIndexRouting(request.routing(), request.index()))); // Fail fast on the node that received the request, rather than failing when translating on the index or delete request. 
if (request.routing() == null && metadata.routingRequired(concreteIndex)) { - throw new RoutingMissingException(concreteIndex, request.type(), request.id()); + throw new RoutingMissingException(concreteIndex, request.id()); } } @@ -226,7 +226,6 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< UpdateResponse update = new UpdateResponse( response.getShardInfo(), response.getShardId(), - response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), @@ -267,7 +266,6 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< UpdateResponse update = new UpdateResponse( response.getShardInfo(), response.getShardId(), - response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), @@ -296,7 +294,6 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< UpdateResponse update = new UpdateResponse( response.getShardInfo(), response.getShardId(), - response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), @@ -325,7 +322,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< if (indexServiceOrNull != null) { IndexShard shard = indexService.getShardOrNull(shardId.getId()); if (shard != null) { - shard.noopUpdate(request.type()); + shard.noopUpdate(); } } listener.onResponse(update); diff --git a/server/src/main/java/org/opensearch/action/update/UpdateHelper.java b/server/src/main/java/org/opensearch/action/update/UpdateHelper.java index d70cd9fbca8f4..0da41a3028edf 100644 --- a/server/src/main/java/org/opensearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/opensearch/action/update/UpdateHelper.java @@ -82,8 +82,7 @@ public UpdateHelper(ScriptService scriptService) { * Prepares an update request by converting it into an index or delete request or an update response (no action). 
*/ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis) { - final GetResult getResult = indexShard.getService() - .getForUpdate(request.type(), request.id(), request.ifSeqNo(), request.ifPrimaryTerm()); + final GetResult getResult = indexShard.getService().getForUpdate(request.id(), request.ifSeqNo(), request.ifPrimaryTerm()); return prepare(indexShard.shardId(), request, getResult, nowInMillis); } @@ -97,7 +96,7 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult return prepareUpsert(shardId, request, getResult, nowInMillis); } else if (getResult.internalSourceRef() == null) { // no source, we can't do anything, throw a failure... - throw new DocumentSourceMissingException(shardId, request.type(), request.id()); + throw new DocumentSourceMissingException(shardId, request.id()); } else if (request.script() == null && request.doc() != null) { // The request has no script, it is a new doc that should be merged with the old document return prepareUpdateIndexRequest(shardId, request, getResult, request.detectNoop()); @@ -138,7 +137,7 @@ Tuple> executeScriptedUpsert(Map ctx = new HashMap<>(16); ctx.put(ContextFields.OP, UpdateOpType.INDEX.toString()); // The default operation is "index" ctx.put(ContextFields.INDEX, getResult.getIndex()); - ctx.put(ContextFields.TYPE, getResult.getType()); ctx.put(ContextFields.ID, getResult.getId()); ctx.put(ContextFields.VERSION, getResult.getVersion()); ctx.put(ContextFields.ROUTING, routing); @@ -288,7 +282,6 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes switch (operation) { case INDEX: final IndexRequest indexRequest = Requests.indexRequest(request.index()) - .type(request.type()) .id(request.id()) .routing(routing) .source(updatedSourceAsMap, updateSourceContentType) @@ -300,7 +293,6 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes return new Result(indexRequest, 
DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); case DELETE: DeleteRequest deleteRequest = Requests.deleteRequest(request.index()) - .type(request.type()) .id(request.id()) .routing(routing) .setIfSeqNo(getResult.getSeqNo()) @@ -313,7 +305,6 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes // If it was neither an INDEX or DELETE operation, treat it as a noop UpdateResponse update = new UpdateResponse( shardId, - getResult.getType(), getResult.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -386,7 +377,6 @@ public static GetResult extractGetResult( // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType) return new GetResult( concreteIndex, - request.type(), request.id(), seqNo, primaryTerm, diff --git a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java index ee7ed695dcba8..36be9f0160c9a 100644 --- a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java @@ -34,6 +34,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.index.IndexRequest; @@ -122,8 +123,6 @@ public class UpdateRequest extends InstanceShardOperationRequest PARSER.declareLong(UpdateRequest::setIfPrimaryTerm, IF_PRIMARY_TERM); } - // Set to null initially so we can know to override in bulk requests that have a default type. 
- private String type; private String id; @Nullable private String routing; @@ -160,7 +159,10 @@ public UpdateRequest(StreamInput in) throws IOException { public UpdateRequest(@Nullable ShardId shardId, StreamInput in) throws IOException { super(shardId, in); waitForActiveShards = ActiveShardCount.readFrom(in); - type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String type = in.readString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]"; + } id = in.readString(); routing = in.readOptionalString(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -210,25 +212,12 @@ public UpdateRequest(String index, String id) { this.id = id; } - /** - * @deprecated Types are in the process of being removed. Use {@link #UpdateRequest(String, String)} instead. - */ - @Deprecated - public UpdateRequest(String index, String type, String id) { - super(index); - this.type = type; - this.id = id; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); if (upsertRequest != null && upsertRequest.version() != Versions.MATCH_ANY) { validationException = addValidationError("can't provide version in upsert request", validationException); } - if (Strings.isEmpty(type())) { - validationException = addValidationError("type is missing", validationException); - } if (Strings.isEmpty(id)) { validationException = addValidationError("id is missing", validationException); } @@ -263,46 +252,6 @@ public ActionRequestValidationException validate() { return validationException; } - /** - * The type of the indexed document. - * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public String type() { - if (type == null) { - return MapperService.SINGLE_MAPPING_NAME; - } - return type; - } - - /** - * Sets the type of the indexed document. 
- * - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public UpdateRequest type(String type) { - this.type = type; - return this; - } - - /** - * Set the default type supplied to a bulk - * request if this individual request's type is null - * or empty - * @deprecated Types are in the process of being removed. - */ - @Deprecated - @Override - public UpdateRequest defaultTypeIfNull(String defaultType) { - if (Strings.isNullOrEmpty(type)) { - type = defaultType; - } - return this; - } - /** * The id of the indexed document. */ @@ -934,9 +883,9 @@ public void writeThin(StreamOutput out) throws IOException { private void doWrite(StreamOutput out, boolean thin) throws IOException { waitForActiveShards.writeTo(out); - // A 7.x request allows null types but if deserialized in a 6.x node will cause nullpointer exceptions. - // So we use the type accessor method here to make the type non-null (will default it to "_doc"). - out.writeString(type()); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { @@ -956,7 +905,6 @@ private void doWrite(StreamOutput out, boolean thin) throws IOException { out.writeBoolean(true); // make sure the basics are set doc.index(index); - doc.type(type); doc.id(id); if (thin) { doc.writeThin(out); @@ -974,7 +922,6 @@ private void doWrite(StreamOutput out, boolean thin) throws IOException { out.writeBoolean(true); // make sure the basics are set upsertRequest.index(index); - upsertRequest.type(type); upsertRequest.id(id); if (thin) { upsertRequest.writeThin(out); @@ -1054,13 +1001,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public String toString() { - StringBuilder res = new StringBuilder().append("update {[") - .append(index) - .append("][") - .append(type()) - .append("][") - .append(id) - 
.append("]"); + StringBuilder res = new StringBuilder().append("update {[").append(index).append("][").append(id).append("]"); res.append(", doc_as_upsert[").append(docAsUpsert).append("]"); if (doc != null) { res.append(", doc[").append(doc).append("]"); diff --git a/server/src/main/java/org/opensearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/opensearch/action/update/UpdateRequestBuilder.java index 3acbfe6dced12..73e470bf8ba69 100644 --- a/server/src/main/java/org/opensearch/action/update/UpdateRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/update/UpdateRequestBuilder.java @@ -54,16 +54,8 @@ public UpdateRequestBuilder(OpenSearchClient client, UpdateAction action) { super(client, action, new UpdateRequest()); } - public UpdateRequestBuilder(OpenSearchClient client, UpdateAction action, String index, String type, String id) { - super(client, action, new UpdateRequest(index, type, id)); - } - - /** - * Sets the type of the indexed document. - */ - public UpdateRequestBuilder setType(String type) { - request.type(type); - return this; + public UpdateRequestBuilder(OpenSearchClient client, UpdateAction action, String index, String id) { + super(client, action, new UpdateRequest(index, id)); } /** diff --git a/server/src/main/java/org/opensearch/action/update/UpdateResponse.java b/server/src/main/java/org/opensearch/action/update/UpdateResponse.java index 4842d7dd03b77..2c6efaf3c5f6b 100644 --- a/server/src/main/java/org/opensearch/action/update/UpdateResponse.java +++ b/server/src/main/java/org/opensearch/action/update/UpdateResponse.java @@ -69,21 +69,12 @@ public UpdateResponse(StreamInput in) throws IOException { * Constructor to be used when a update didn't translate in a write. 
* For example: update script with operation set to none */ - public UpdateResponse(ShardId shardId, String type, String id, long seqNo, long primaryTerm, long version, Result result) { - this(new ShardInfo(0, 0), shardId, type, id, seqNo, primaryTerm, version, result); + public UpdateResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) { + this(new ShardInfo(0, 0), shardId, id, seqNo, primaryTerm, version, result); } - public UpdateResponse( - ShardInfo shardInfo, - ShardId shardId, - String type, - String id, - long seqNo, - long primaryTerm, - long version, - Result result - ) { - super(shardId, type, id, seqNo, primaryTerm, version, result); + public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) { + super(shardId, id, seqNo, primaryTerm, version, result); setShardInfo(shardInfo); } @@ -137,7 +128,6 @@ public String toString() { StringBuilder builder = new StringBuilder(); builder.append("UpdateResponse["); builder.append("index=").append(getIndex()); - builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); builder.append(",seqNo=").append(getSeqNo()); @@ -190,15 +180,14 @@ public void setGetResult(GetResult getResult) { public UpdateResponse build() { UpdateResponse update; if (shardInfo != null) { - update = new UpdateResponse(shardInfo, shardId, type, id, seqNo, primaryTerm, version, result); + update = new UpdateResponse(shardInfo, shardId, id, seqNo, primaryTerm, version, result); } else { - update = new UpdateResponse(shardId, type, id, seqNo, primaryTerm, version, result); + update = new UpdateResponse(shardId, id, seqNo, primaryTerm, version, result); } if (getResult != null) { update.setGetResult( new GetResult( update.getIndex(), - update.getType(), update.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), diff --git 
a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java index 49787eeb09b66..d45d8ddab9c2c 100644 --- a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java +++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java @@ -79,7 +79,7 @@ public static boolean isSystemCallFilterInstalled() { // create a view of sysprops map that does not allow modifications // this must be done this way (e.g. versus an actual typed map), because - // some test methods still change properties, so whitelisted changes must + // some test methods still change properties, so allowlisted changes must // be reflected in this view. private static final Dictionary SYSTEM_PROPERTIES; static { diff --git a/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java b/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java index 434af8fceb1dc..8e179de9c28df 100644 --- a/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java +++ b/server/src/main/java/org/opensearch/bootstrap/SystemCallFilter.java @@ -227,7 +227,7 @@ static SockFilter BPF_JUMP(int code, int k, int jt, int jf) { static class Arch { /** AUDIT_ARCH_XXX constant from linux/audit.h */ final int audit; - /** syscall limit (necessary for blacklisting on amd64, to ban 32-bit syscalls) */ + /** syscall limit (necessary for denylisting on amd64, to ban 32-bit syscalls) */ final int limit; /** __NR_fork */ final int fork; diff --git a/server/src/main/java/org/opensearch/client/Client.java b/server/src/main/java/org/opensearch/client/Client.java index b6192643ac3fc..bca68834ca3cf 100644 --- a/server/src/main/java/org/opensearch/client/Client.java +++ b/server/src/main/java/org/opensearch/client/Client.java @@ -112,7 +112,7 @@ public interface Client extends OpenSearchClient, Releasable { AdminClient admin(); /** - * Index a JSON source associated with a given index and type. 
+ * Index a JSON source associated with a given index. *

        * The id is optional, if it is not provided, one will be generated automatically. * @@ -123,7 +123,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture index(IndexRequest request); /** - * Index a document associated with a given index and type. + * Index a document associated with a given index. *

        * The id is optional, if it is not provided, one will be generated automatically. * @@ -134,12 +134,21 @@ public interface Client extends OpenSearchClient, Releasable { void index(IndexRequest request, ActionListener listener); /** - * Index a document associated with a given index and type. + * Index a document associated with a given index. *

        * The id is optional, if it is not provided, one will be generated automatically. */ IndexRequestBuilder prepareIndex(); + /** + * Index a document associated with a given index. + *

        + * The id is optional, if it is not provided, one will be generated automatically. + * + * @param index The index to index the document to + */ + IndexRequestBuilder prepareIndex(String index); + /** * Updates a document based on a script. * @@ -164,31 +173,10 @@ public interface Client extends OpenSearchClient, Releasable { /** * Updates a document based on a script. */ - UpdateRequestBuilder prepareUpdate(String index, String type, String id); - - /** - * Index a document associated with a given index and type. - *

        - * The id is optional, if it is not provided, one will be generated automatically. - * - * @param index The index to index the document to - * @param type The type to index the document to - */ - IndexRequestBuilder prepareIndex(String index, String type); - - /** - * Index a document associated with a given index and type. - *

        - * The id is optional, if it is not provided, one will be generated automatically. - * - * @param index The index to index the document to - * @param type The type to index the document to - * @param id The id of the document - */ - IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id); + UpdateRequestBuilder prepareUpdate(String index, String id); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index, and id. * * @param request The delete request * @return The result future @@ -197,7 +185,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture delete(DeleteRequest request); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index, and id. * * @param request The delete request * @param listener A listener to be notified with a result @@ -206,18 +194,17 @@ public interface Client extends OpenSearchClient, Releasable { void delete(DeleteRequest request, ActionListener listener); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index, and id. */ DeleteRequestBuilder prepareDelete(); /** - * Deletes a document from the index based on the index, type and id. + * Deletes a document from the index based on the index, and id. * * @param index The index to delete the document from - * @param type The type of the document to delete * @param id The id of the document to delete */ - DeleteRequestBuilder prepareDelete(String index, String type, String id); + DeleteRequestBuilder prepareDelete(String index, String id); /** * Executes a bulk of index / delete operations. 
@@ -243,12 +230,12 @@ public interface Client extends OpenSearchClient, Releasable { BulkRequestBuilder prepareBulk(); /** - * Executes a bulk of index / delete operations with default index and/or type + * Executes a bulk of index / delete operations with default index */ - BulkRequestBuilder prepareBulk(@Nullable String globalIndex, @Nullable String globalType); + BulkRequestBuilder prepareBulk(@Nullable String globalIndex); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. * * @param request The get request * @return The result future @@ -257,7 +244,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture get(GetRequest request); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. * * @param request The get request * @param listener A listener to be notified with a result @@ -266,14 +253,14 @@ public interface Client extends OpenSearchClient, Releasable { void get(GetRequest request, ActionListener listener); /** - * Gets the document that was indexed from an index with a type and id. + * Gets the document that was indexed from an index with an id. */ GetRequestBuilder prepareGet(); /** - * Gets the document that was indexed from an index with a type (optional) and id. + * Gets the document that was indexed from an index with an id. */ - GetRequestBuilder prepareGet(String index, @Nullable String type, String id); + GetRequestBuilder prepareGet(String index, String id); /** * Multi get documents. @@ -291,7 +278,7 @@ public interface Client extends OpenSearchClient, Releasable { MultiGetRequestBuilder prepareMultiGet(); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query. 
* * @param request The search request * @return The result future @@ -300,7 +287,7 @@ public interface Client extends OpenSearchClient, Releasable { ActionFuture search(SearchRequest request); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query. * * @param request The search request * @param listener A listener to be notified of the result @@ -309,7 +296,7 @@ public interface Client extends OpenSearchClient, Releasable { void search(SearchRequest request, ActionListener listener); /** - * Search across one or more indices and one or more types with a query. + * Search across one or more indices with a query. */ SearchRequestBuilder prepareSearch(String... indices); @@ -375,10 +362,9 @@ public interface Client extends OpenSearchClient, Releasable { * Builder for the term vector request. * * @param index The index to load the document from - * @param type The type of the document * @param id The id of the document */ - TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id); + TermVectorsRequestBuilder prepareTermVectors(String index, String id); /** * Multi get term vectors. @@ -399,10 +385,9 @@ public interface Client extends OpenSearchClient, Releasable { * Computes a score explanation for the specified request. * * @param index The index this explain is targeted for - * @param type The type this explain is targeted for * @param id The document identifier this explain is targeted for */ - ExplainRequestBuilder prepareExplain(String index, String type, String id); + ExplainRequestBuilder prepareExplain(String index, String id); /** * Computes a score explanation for the specified request. 
diff --git a/server/src/main/java/org/opensearch/client/Requests.java b/server/src/main/java/org/opensearch/client/Requests.java index a7818e9ac136b..d89f55a37a9cf 100644 --- a/server/src/main/java/org/opensearch/client/Requests.java +++ b/server/src/main/java/org/opensearch/client/Requests.java @@ -97,8 +97,8 @@ public static IndexRequest indexRequest() { } /** - * Create an index request against a specific index. Note the {@link IndexRequest#type(String)} must be - * set as well and optionally the {@link IndexRequest#id(String)}. + * Create an index request against a specific index. + * Note that setting {@link IndexRequest#id(String)} is optional. * * @param index The index name to index the request against * @return The index request @@ -109,8 +109,8 @@ public static IndexRequest indexRequest(String index) { } /** - * Creates a delete request against a specific index. Note the {@link DeleteRequest#type(String)} and - * {@link DeleteRequest#id(String)} must be set. + * Creates a delete request against a specific index. + * Note that {@link DeleteRequest#id(String)} must be set. * * @param index The index name to delete from * @return The delete request @@ -129,7 +129,7 @@ public static BulkRequest bulkRequest() { /** * Creates a get request to get the JSON source from an index based on a type and id. Note, the - * {@link GetRequest#type(String)} and {@link GetRequest#id(String)} must be set. + * {@link GetRequest#id(String)} must be set. 
* * @param index The index to get the JSON source from * @return The get request diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 2e2c07a2433f4..a37d293ee5dd2 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -447,13 +447,8 @@ public IndexRequestBuilder prepareIndex() { } @Override - public IndexRequestBuilder prepareIndex(String index, String type) { - return prepareIndex(index, type, null); - } - - @Override - public IndexRequestBuilder prepareIndex(String index, String type, @Nullable String id) { - return prepareIndex().setIndex(index).setType(type).setId(id); + public IndexRequestBuilder prepareIndex(String index) { + return new IndexRequestBuilder(this, IndexAction.INSTANCE, index); } @Override @@ -468,12 +463,12 @@ public void update(final UpdateRequest request, final ActionListener indexNamesInShards = new HashSet<>(); shards.iterator().forEachRemaining(s -> { indexNamesInShards.add(s.key.getIndexName()); - assert source == null - || s.value.nodeId == null : "Shard snapshot must not be assigned to data node when copying from snapshot [" - + source - + "]"; + assert source == null || s.value.nodeId == null + : "Shard snapshot must not be assigned to data node when copying from snapshot [" + source + "]"; }); assert source == null || indexNames.isEmpty() == false : "No empty snapshot clones allowed"; assert source != null || indexNames.equals(indexNamesInShards) : "Indices in shards " @@ -348,12 +346,8 @@ private static boolean assertShardsConsistent( final boolean shardsCompleted = completed(shards.values()) && completed(clones.values()); // Check state consistency for normal snapshots and started clone operations if (source == null || clones.isEmpty() == false) { - assert (state.completed() && shardsCompleted) - || (state.completed() 
== false - && shardsCompleted == false) : "Completed state must imply all shards completed but saw state [" - + state - + "] and shards " - + shards; + assert (state.completed() && shardsCompleted) || (state.completed() == false && shardsCompleted == false) + : "Completed state must imply all shards completed but saw state [" + state + "] and shards " + shards; } if (source != null && state.completed()) { assert hasFailures(clones) == false || state == State.FAILED : "Failed shard clones in [" @@ -567,8 +561,8 @@ public Entry withStartedShards(ImmutableOpenMap sh userMetadata, version ); - assert updated.state().completed() == false - && completed(updated.shards().values()) == false : "Only running snapshots allowed but saw [" + updated + "]"; + assert updated.state().completed() == false && completed(updated.shards().values()) == false + : "Only running snapshots allowed but saw [" + updated + "]"; return updated; } @@ -966,8 +960,8 @@ private static boolean assertConsistentEntries(List entries) { for (Entry entry : entries) { for (ObjectObjectCursor shard : entry.shards()) { if (shard.value.isActive()) { - assert assignedShardsByRepo.computeIfAbsent(entry.repository(), k -> new HashSet<>()) - .add(shard.key) : "Found duplicate shard assignments in " + entries; + assert assignedShardsByRepo.computeIfAbsent(entry.repository(), k -> new HashSet<>()).add(shard.key) + : "Found duplicate shard assignments in " + entries; } } } diff --git a/server/src/main/java/org/opensearch/cluster/action/index/MappingUpdatedAction.java b/server/src/main/java/org/opensearch/cluster/action/index/MappingUpdatedAction.java index 4bc7e61a67240..f22d489ec6fd7 100644 --- a/server/src/main/java/org/opensearch/cluster/action/index/MappingUpdatedAction.java +++ b/server/src/main/java/org/opensearch/cluster/action/index/MappingUpdatedAction.java @@ -51,7 +51,6 @@ import org.opensearch.common.util.concurrent.UncategorizedExecutionException; import org.opensearch.common.xcontent.XContentType; 
import org.opensearch.index.Index; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Mapping; import java.util.concurrent.Semaphore; @@ -110,10 +109,7 @@ public void setClient(Client client) { * {@code timeout} is the master node timeout ({@link MasterNodeRequest#masterNodeTimeout()}), * potentially waiting for a master node to be available. */ - public void updateMappingOnMaster(Index index, String type, Mapping mappingUpdate, ActionListener listener) { - if (type.equals(MapperService.DEFAULT_MAPPING)) { - throw new IllegalArgumentException("_default_ mapping should not be updated"); - } + public void updateMappingOnMaster(Index index, Mapping mappingUpdate, ActionListener listener) { final RunOnce release = new RunOnce(() -> semaphore.release()); try { @@ -125,7 +121,7 @@ public void updateMappingOnMaster(Index index, String type, Mapping mappingUpdat } boolean successFullySent = false; try { - sendUpdateMapping(index, type, mappingUpdate, ActionListener.runBefore(listener, release::run)); + sendUpdateMapping(index, mappingUpdate, ActionListener.runBefore(listener, release::run)); successFullySent = true; } finally { if (successFullySent == false) { @@ -140,10 +136,9 @@ int blockedThreads() { } // can be overridden by tests - protected void sendUpdateMapping(Index index, String type, Mapping mappingUpdate, ActionListener listener) { + protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListener listener) { PutMappingRequest putMappingRequest = new PutMappingRequest(); putMappingRequest.setConcreteIndex(index); - putMappingRequest.type(type); putMappingRequest.source(mappingUpdate.toString(), XContentType.JSON); putMappingRequest.masterNodeTimeout(dynamicMappingUpdateTimeout); putMappingRequest.timeout(TimeValue.ZERO); diff --git a/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java index 
f9f6f5437f360..b28fde5d9cc16 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/CoordinationState.java @@ -305,7 +305,7 @@ public boolean handleJoin(Join join) { boolean prevElectionWon = electionWon; electionWon = isElectionQuorum(joinVotes); assert !prevElectionWon || electionWon : // we cannot go from won to not won - "locaNode= " + localNode + ", join=" + join + ", joinVotes=" + joinVotes; + "locaNode= " + localNode + ", join=" + join + ", joinVotes=" + joinVotes; logger.debug( "handleJoin: added join {} from [{}] for election, electionWon={} lastAcceptedTerm={} lastAcceptedVersion={}", join, @@ -378,8 +378,8 @@ && getLastCommittedConfiguration().equals(getLastAcceptedConfiguration()) == fal throw new CoordinationStateRejectedException("only allow reconfiguration if joinVotes have quorum for new config"); } - assert clusterState.getLastCommittedConfiguration() - .equals(getLastCommittedConfiguration()) : "last committed configuration should not change"; + assert clusterState.getLastCommittedConfiguration().equals(getLastCommittedConfiguration()) + : "last committed configuration should not change"; lastPublishedVersion = clusterState.version(); lastPublishedConfiguration = clusterState.getLastAcceptedConfiguration(); diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java index d5eb550ca4e6d..557f11f75d969 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java @@ -1207,8 +1207,8 @@ ClusterState getStateForMasterService() { private ClusterState clusterStateWithNoMasterBlock(ClusterState clusterState) { if (clusterState.nodes().getMasterNodeId() != null) { // remove block if it already exists before adding new one - assert clusterState.blocks() 
- .hasGlobalBlockWithId(NO_MASTER_BLOCK_ID) == false : "NO_MASTER_BLOCK should only be added by Coordinator"; + assert clusterState.blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID) == false + : "NO_MASTER_BLOCK should only be added by Coordinator"; final ClusterBlocks clusterBlocks = ClusterBlocks.builder() .blocks(clusterState.blocks()) .addGlobalBlock(noMasterBlockService.getNoMasterBlock()) diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java index 62762937d5edb..ea5c33b4300a5 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java @@ -193,20 +193,15 @@ public ClusterTasksResult execute(ClusterState currentState, List jo if (joiniedNodeNameIds.isEmpty() == false) { Set currentVotingConfigExclusions = currentState.getVotingConfigExclusions(); Set newVotingConfigExclusions = currentVotingConfigExclusions.stream() - .map( - e -> { - // Update nodeId in VotingConfigExclusion when a new node with excluded node name joins - if (CoordinationMetadata.VotingConfigExclusion.MISSING_VALUE_MARKER.equals(e.getNodeId()) - && joiniedNodeNameIds.containsKey(e.getNodeName())) { - return new CoordinationMetadata.VotingConfigExclusion( - joiniedNodeNameIds.get(e.getNodeName()), - e.getNodeName() - ); - } else { - return e; - } + .map(e -> { + // Update nodeId in VotingConfigExclusion when a new node with excluded node name joins + if (CoordinationMetadata.VotingConfigExclusion.MISSING_VALUE_MARKER.equals(e.getNodeId()) + && joiniedNodeNameIds.containsKey(e.getNodeName())) { + return new CoordinationMetadata.VotingConfigExclusion(joiniedNodeNameIds.get(e.getNodeName()), e.getNodeName()); + } else { + return e; } - ) + }) .collect(Collectors.toSet()); // if VotingConfigExclusions did get updated diff --git 
a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index 147c8987169c7..6510c57060fe0 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -660,39 +660,17 @@ public ImmutableOpenMap getAliases() { return this.aliases; } - /** - * Return an object that maps each type to the associated mappings. - * The return value is never {@code null} but may be empty if the index - * has no mappings. - * @deprecated Use {@link #mapping()} instead now that indices have a single type - */ - @Deprecated - public ImmutableOpenMap getMappings() { - return mappings; - } - /** * Return the concrete mapping for this index or {@code null} if this index has no mappings at all. */ @Nullable public MappingMetadata mapping() { for (ObjectObjectCursor cursor : mappings) { - if (cursor.key.equals(MapperService.DEFAULT_MAPPING) == false) { - return cursor.value; - } + return cursor.value; } return null; } - /** - * Get the default mapping. - * NOTE: this is always {@code null} for 7.x indices which are disallowed to have a default mapping. - */ - @Nullable - public MappingMetadata defaultMapping() { - return mappings.get(MapperService.DEFAULT_MAPPING); - } - public static final String INDEX_RESIZE_SOURCE_UUID_KEY = "index.resize.source.uuid"; public static final String INDEX_RESIZE_SOURCE_NAME_KEY = "index.resize.source.name"; public static final Setting INDEX_RESIZE_SOURCE_UUID = Setting.simpleString(INDEX_RESIZE_SOURCE_UUID_KEY); @@ -704,25 +682,6 @@ public Index getResizeSourceIndex() { : null; } - /** - * Sometimes, the default mapping exists and an actual mapping is not created yet (introduced), - * in this case, we want to return the default mapping in case it has some default mapping definitions. - *

        - * Note, once the mapping type is introduced, the default mapping is applied on the actual typed MappingMetadata, - * setting its routing, timestamp, and so on if needed. - */ - @Nullable - public MappingMetadata mappingOrDefault() { - MappingMetadata mapping = null; - for (ObjectCursor m : mappings.values()) { - if (mapping == null || mapping.type().equals(MapperService.DEFAULT_MAPPING)) { - mapping = m.value; - } - } - - return mapping; - } - ImmutableOpenMap getCustomData() { return this.customData; } @@ -1190,17 +1149,25 @@ public Builder settings(Settings settings) { return this; } - public MappingMetadata mapping(String type) { - return mappings.get(type); + public MappingMetadata mapping() { + return mappings.get(MapperService.SINGLE_MAPPING_NAME); } - public Builder putMapping(String type, String source) throws IOException { - putMapping(new MappingMetadata(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true))); + public Builder putMapping(String source) throws IOException { + putMapping( + new MappingMetadata( + MapperService.SINGLE_MAPPING_NAME, + XContentHelper.convertToMap(XContentFactory.xContent(source), source, true) + ) + ); return this; } public Builder putMapping(MappingMetadata mappingMd) { - mappings.put(mappingMd.type(), mappingMd); + mappings.clear(); + if (mappingMd != null) { + mappings.put(mappingMd.type(), mappingMd); + } return this; } @@ -1337,14 +1304,6 @@ public IndexMetadata build() { ImmutableOpenMap.Builder tmpAliases = aliases; Settings tmpSettings = settings; - // update default mapping on the MappingMetadata - if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { - MappingMetadata defaultMapping = mappings.get(MapperService.DEFAULT_MAPPING); - for (ObjectCursor cursor : mappings.values()) { - cursor.value.updateDefaultMapping(defaultMapping); - } - } - /* * We expect that the metadata has been properly built to set the number of shards and the number of replicas, and do not rely * on the 
default values here. Those must have been set upstream. @@ -1497,23 +1456,25 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build if (context != Metadata.XContentContext.API) { builder.startArray(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { if (binary) { - builder.value(cursor.value.source().compressed()); + builder.value(mmd.source().compressed()); } else { - builder.map(XContentHelper.convertToMap(cursor.value.source().uncompressed(), true).v2()); + builder.map(XContentHelper.convertToMap(mmd.source().uncompressed(), true).v2()); } } builder.endArray(); } else { builder.startObject(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { - Map mapping = XContentHelper.convertToMap(cursor.value.source().uncompressed(), false).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { + Map mapping = XContentHelper.convertToMap(mmd.source().uncompressed(), false).v2(); + if (mapping.size() == 1 && mapping.containsKey(mmd.type())) { // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor.key); + mapping = (Map) mapping.get(mmd.type()); } - builder.field(cursor.key); + builder.field(mmd.type()); builder.map(mapping); } builder.endObject(); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java index 7cf3c3da24c52..810365589ae1f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java @@ -51,18 +51,17 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentParser; 
+import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.mapper.MapperService; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import static org.opensearch.cluster.metadata.Metadata.CONTEXT_MODE_PARAM; - public class IndexTemplateMetadata extends AbstractDiffable { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexTemplateMetadata.class); @@ -162,12 +161,15 @@ public Settings settings() { return this.settings; } - public ImmutableOpenMap mappings() { - return this.mappings; + public CompressedXContent mappings() { + if (this.mappings.isEmpty()) { + return null; + } + return this.mappings.iterator().next().value; } - public ImmutableOpenMap getMappings() { - return this.mappings; + public CompressedXContent getMappings() { + return this.mappings(); } public ImmutableOpenMap aliases() { @@ -195,7 +197,7 @@ public boolean equals(Object o) { if (!settings.equals(that.settings)) return false; if (!patterns.equals(that.patterns)) return false; - return Objects.equals(version, that.version); + return Objects.equals(aliases, that.aliases) && Objects.equals(version, that.version); } @Override @@ -206,6 +208,7 @@ public int hashCode() { result = 31 * result + patterns.hashCode(); result = 31 * result + settings.hashCode(); result = 31 * result + mappings.hashCode(); + result = 31 * result + aliases.hashCode(); return result; } @@ -249,6 +252,19 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(version); } + @Override + public String toString() { + try { + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + Builder.toXContentWithTypes(this, builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + return Strings.toString(builder); + } catch (IOException e) { + throw new 
UncheckedIOException(e); + } + } + public static class Builder { private static final Set VALID_FIELDS = Sets.newHashSet( @@ -287,7 +303,7 @@ public Builder(IndexTemplateMetadata indexTemplateMetadata) { patterns(indexTemplateMetadata.patterns()); settings(indexTemplateMetadata.settings()); - mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings()); + mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings); aliases = ImmutableOpenMap.builder(indexTemplateMetadata.aliases()); } @@ -357,23 +373,6 @@ public static void toXContentWithTypes( builder.endObject(); } - /** - * Removes the nested type in the xContent representation of {@link IndexTemplateMetadata}. - * - * This method is useful to help bridge the gap between an the internal representation which still uses (the legacy format) a - * nested type in the mapping, and the external representation which does not use a nested type in the mapping. - */ - public static void removeType(IndexTemplateMetadata indexTemplateMetadata, XContentBuilder builder) throws IOException { - builder.startObject(); - toInnerXContent( - indexTemplateMetadata, - builder, - new ToXContent.MapParams(Collections.singletonMap("reduce_mappings", "true")), - false - ); - builder.endObject(); - } - /** * Serializes the template to xContent, making sure not to nest mappings under the * type name. @@ -400,10 +399,6 @@ private static void toInnerXContent( ToXContent.Params params, boolean includeTypeName ) throws IOException { - Metadata.XContentContext context = params.param(CONTEXT_MODE_PARAM) != null - ? 
Metadata.XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM)) - : null; - builder.field("order", indexTemplateMetadata.order()); if (indexTemplateMetadata.version() != null) { builder.field("version", indexTemplateMetadata.version()); @@ -414,52 +409,19 @@ private static void toInnerXContent( indexTemplateMetadata.settings().toXContent(builder, params); builder.endObject(); - if (context == Metadata.XContentContext.API) { - builder.startObject("mappings"); - for (ObjectObjectCursor cursor1 : indexTemplateMetadata.mappings()) { - Map mapping = XContentHelper.convertToMap(cursor1.value.uncompressed(), false).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) { - // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor1.key); - } - builder.field(cursor1.key); - builder.map(mapping); - } - builder.endObject(); - } else if (params.paramAsBoolean("reduce_mappings", false)) { - // The parameter include_type_name is only ever used in the REST API, where reduce_mappings is - // always set to true. We therefore only check for include_type_name in this branch. 
+ includeTypeName &= (params.paramAsBoolean("reduce_mappings", false) == false); + CompressedXContent m = indexTemplateMetadata.mappings(); + if (m != null) { + Map documentMapping = XContentHelper.convertToMap(m.uncompressed(), true).v2(); if (includeTypeName == false) { - Map documentMapping = null; - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - if (!cursor.key.equals(MapperService.DEFAULT_MAPPING)) { - assert documentMapping == null; - Map mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2(); - documentMapping = reduceMapping(cursor.key, mapping); - } - } - - if (documentMapping != null) { - builder.field("mappings", documentMapping); - } else { - builder.startObject("mappings").endObject(); - } + documentMapping = reduceMapping(documentMapping); } else { - builder.startObject("mappings"); - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - Map mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2(); - mapping = reduceMapping(cursor.key, mapping); - builder.field(cursor.key); - builder.map(mapping); - } - builder.endObject(); + documentMapping = reduceEmptyMapping(documentMapping); } + builder.field("mappings"); + builder.map(documentMapping); } else { - builder.startArray("mappings"); - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - builder.map(XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2()); - } - builder.endArray(); + builder.startObject("mappings").endObject(); } builder.startObject("aliases"); @@ -470,15 +432,22 @@ private static void toInnerXContent( } @SuppressWarnings("unchecked") - private static Map reduceMapping(String type, Map mapping) { - if (mapping.size() == 1 && mapping.containsKey(type)) { - // the type name is the root value, reduce it - return (Map) mapping.get(type); + private static Map reduceEmptyMapping(Map mapping) { + if (mapping.keySet().size() == 1 + && 
mapping.containsKey(MapperService.SINGLE_MAPPING_NAME) + && ((Map) mapping.get(MapperService.SINGLE_MAPPING_NAME)).size() == 0) { + return (Map) mapping.values().iterator().next(); } else { return mapping; } } + @SuppressWarnings("unchecked") + private static Map reduceMapping(Map mapping) { + assert mapping.keySet().size() == 1 : mapping.keySet(); + return (Map) mapping.values().iterator().next(); + } + public static IndexTemplateMetadata fromXContent(XContentParser parser, String templateName) throws IOException { Builder builder = new Builder(templateName); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java index e3ab1d491131a..620542f8f1bde 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java @@ -36,16 +36,21 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.cluster.AbstractDiffable; import org.opensearch.cluster.Diff; +import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.DocumentMapper; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; import java.util.Map; +import java.util.Objects; import static org.opensearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; @@ -53,49 +58,21 @@ * Mapping configuration for a type. 
*/ public class MappingMetadata extends AbstractDiffable { - - public static class Routing { - - public static final Routing EMPTY = new Routing(false); - - private final boolean required; - - public Routing(boolean required) { - this.required = required; - } - - public boolean required() { - return required; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Routing routing = (Routing) o; - - return required == routing.required; - } - - @Override - public int hashCode() { - return getClass().hashCode() + (required ? 1 : 0); - } - } + public static final MappingMetadata EMPTY_MAPPINGS = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); private final String type; private final CompressedXContent source; - private Routing routing; + private final boolean routingRequired; public MappingMetadata(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); - this.routing = new Routing(docMapper.routingFieldMapper().required()); + this.routingRequired = docMapper.routingFieldMapper().required(); } + @SuppressWarnings("unchecked") public MappingMetadata(CompressedXContent mapping) { this.source = mapping; Map mappingMap = XContentHelper.convertToMap(mapping.compressedReference(), true).v2(); @@ -103,22 +80,29 @@ public MappingMetadata(CompressedXContent mapping) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } this.type = mappingMap.keySet().iterator().next(); - initMappers((Map) mappingMap.get(this.type)); + this.routingRequired = isRoutingRequired((Map) mappingMap.get(this.type)); } - public MappingMetadata(String type, Map mapping) throws IOException { + @SuppressWarnings("unchecked") + public MappingMetadata(String type, Map mapping) { this.type = type; - this.source = new CompressedXContent((builder, params) -> builder.mapContents(mapping), 
XContentType.JSON, ToXContent.EMPTY_PARAMS); + try { + XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); + this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder)); + } catch (IOException e) { + throw new UncheckedIOException(e); // XContent exception, should never happen + } Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); } - initMappers(withoutType); + this.routingRequired = isRoutingRequired(withoutType); } - private void initMappers(Map withoutType) { + @SuppressWarnings("unchecked") + private boolean isRoutingRequired(Map withoutType) { + boolean required = false; if (withoutType.containsKey("_routing")) { - boolean required = false; Map routingNode = (Map) withoutType.get("_routing"); for (Map.Entry entry : routingNode.entrySet()) { String fieldName = entry.getKey(); @@ -134,16 +118,8 @@ private void initMappers(Map withoutType) { } } } - this.routing = new Routing(required); - } else { - this.routing = Routing.EMPTY; - } - } - - void updateDefaultMapping(MappingMetadata defaultMapping) { - if (routing == Routing.EMPTY) { - routing = defaultMapping.routing(); } + return required; } public String type() { @@ -173,8 +149,8 @@ public Map getSourceAsMap() throws OpenSearchParseException { return sourceAsMap(); } - public Routing routing() { - return this.routing; + public boolean routingRequired() { + return this.routingRequired; } @Override @@ -182,7 +158,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(type()); source().writeTo(out); // routing - out.writeBoolean(routing().required()); + out.writeBoolean(routingRequired); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { out.writeBoolean(false); // hasParentField } @@ -195,7 +171,7 @@ public boolean equals(Object o) { MappingMetadata that = (MappingMetadata) o; - if (!routing.equals(that.routing)) return false; + if (!Objects.equals(this.routingRequired, 
that.routingRequired)) return false; if (!source.equals(that.source)) return false; if (!type.equals(that.type)) return false; @@ -204,17 +180,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - int result = type.hashCode(); - result = 31 * result + source.hashCode(); - result = 31 * result + routing.hashCode(); - return result; + return Objects.hash(type, source, routingRequired); } public MappingMetadata(StreamInput in) throws IOException { type = in.readString(); source = CompressedXContent.readCompressedString(in); // routing - routing = new Routing(in.readBoolean()); + routingRequired = in.readBoolean(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); // hasParentField } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java index 09e3bbe0cac32..6e9c30877f9c2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java @@ -454,44 +454,26 @@ public boolean hasAliases(final String[] aliases, String[] concreteIndices) { } /** - * Finds all mappings for types and concrete indices. Types are expanded to include all types that match the glob - * patterns in the types array. Empty types array, null or {"_all"} will be expanded to all types available for - * the given indices. Only fields that match the provided field filter will be returned (default is a predicate - * that always returns true, which can be overridden via plugins) + * Finds all mappings for concrete indices. 
Only fields that match the provided field + * filter will be returned (default is a predicate that always returns true, which can be + * overridden via plugins) * * @see MapperPlugin#getFieldFilter() * */ - public ImmutableOpenMap> findMappings( - String[] concreteIndices, - final String[] types, - Function> fieldFilter - ) throws IOException { - assert types != null; + public ImmutableOpenMap findMappings(String[] concreteIndices, Function> fieldFilter) + throws IOException { assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } - boolean isAllTypes = isAllTypes(types); - ImmutableOpenMap.Builder> indexMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder indexMapBuilder = ImmutableOpenMap.builder(); Iterable intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetadata indexMetadata = indices.get(index); Predicate fieldPredicate = fieldFilter.apply(index); - if (isAllTypes) { - indexMapBuilder.put(index, filterFields(indexMetadata.getMappings(), fieldPredicate)); - } else { - ImmutableOpenMap.Builder filteredMappings = ImmutableOpenMap.builder(); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { - if (Regex.simpleMatch(types, cursor.key)) { - filteredMappings.put(cursor.key, filterFields(cursor.value, fieldPredicate)); - } - } - if (!filteredMappings.isEmpty()) { - indexMapBuilder.put(index, filteredMappings.build()); - } - } + indexMapBuilder.put(index, filterFields(indexMetadata.mapping(), fieldPredicate)); } return indexMapBuilder.build(); } @@ -514,22 +496,11 @@ public ImmutableOpenMap findDataStreams(Str return builder.build(); } - private static ImmutableOpenMap filterFields( - ImmutableOpenMap mappings, - Predicate fieldPredicate - ) throws IOException { - if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { - return mappings; - } - ImmutableOpenMap.Builder builder = 
ImmutableOpenMap.builder(mappings.size()); - for (ObjectObjectCursor cursor : mappings) { - builder.put(cursor.key, filterFields(cursor.value, fieldPredicate)); - } - return builder.build(); // No types specified means return them all - } - @SuppressWarnings("unchecked") - private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Predicate fieldPredicate) throws IOException { + private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Predicate fieldPredicate) { + if (mappingMetadata == null) { + return MappingMetadata.EMPTY_MAPPINGS; + } if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { return mappingMetadata; } @@ -909,7 +880,7 @@ public boolean routingRequired(String concreteIndex) { if (indexMetadata != null) { MappingMetadata mappingMetadata = indexMetadata.mapping(); if (mappingMetadata != null) { - return mappingMetadata.routing().required(); + return mappingMetadata.routingRequired(); } } return false; diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 7ef1248f7d3e1..cb76b7217624f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -32,7 +32,6 @@ package org.opensearch.cluster.metadata; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -98,7 +97,6 @@ import java.nio.file.Path; import java.time.Instant; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -115,7 +113,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static java.util.Collections.singletonMap; import static 
java.util.stream.Collectors.toList; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING; @@ -452,7 +449,7 @@ private ClusterState applyCreateIndexWithTemporaryService( final boolean silent, final IndexMetadata sourceMetadata, final IndexMetadata temporaryIndexMeta, - final List>> mappings, + final List> mappings, final Function> aliasSupplier, final List templatesApplied, final BiConsumer metadataTransformer @@ -474,7 +471,6 @@ private ClusterState applyCreateIndexWithTemporaryService( request.index(), aliases, indexService.mapperService()::documentMapper, - () -> indexService.mapperService().documentMapper(MapperService.DEFAULT_MAPPING), temporaryIndexMeta.getSettings(), temporaryIndexMeta.getRoutingNumShards(), sourceMetadata, @@ -543,20 +539,10 @@ private ClusterState applyCreateIndexRequestWithV1Templates( templates.stream().map(IndexTemplateMetadata::name).collect(Collectors.toList()) ); - final Map> mappings = Collections.unmodifiableMap( + final Map mappings = Collections.unmodifiableMap( parseV1Mappings( request.mappings(), - templates.stream() - .map(IndexTemplateMetadata::getMappings) - // Converts the ImmutableOpenMap into a non-terrible HashMap - .map(iom -> { - Map converted = new HashMap<>(iom.size()); - for (ObjectObjectCursor cursor : iom) { - converted.put(cursor.key, cursor.value); - } - return converted; - }) - .collect(toList()), + templates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), xContentRegistry ) ); @@ -618,7 +604,7 @@ private ClusterState applyCreateIndexRequestWithV2Template( ); } - final List>> mappings = collectV2Mappings( + final List> mappings = collectV2Mappings( request.mappings(), currentState, templateName, @@ -661,29 +647,31 @@ private ClusterState applyCreateIndexRequestWithV2Template( ); } - public static List>> collectV2Mappings( - final Map requestMappings, + public 
static List> collectV2Mappings( + final String requestMappings, final ClusterState currentState, final String templateName, final NamedXContentRegistry xContentRegistry, final String indexName ) throws Exception { - List>> result = new ArrayList<>(); - List templateMappings = MetadataIndexTemplateService.collectMappings(currentState, templateName, indexName); + return collectV2Mappings(requestMappings, templateMappings, xContentRegistry); + } + + public static List> collectV2Mappings( + final String requestMappings, + final List templateMappings, + final NamedXContentRegistry xContentRegistry + ) throws Exception { + List> result = new ArrayList<>(); + for (CompressedXContent templateMapping : templateMappings) { Map parsedTemplateMapping = MapperService.parseMapping(xContentRegistry, templateMapping.string()); - result.add(singletonMap(MapperService.SINGLE_MAPPING_NAME, parsedTemplateMapping)); + result.add(parsedTemplateMapping); } - if (requestMappings.size() > 0) { - assert requestMappings.size() == 1 : "expected request metadata mappings to have 1 type but it had: " + requestMappings; - Map.Entry entry = requestMappings.entrySet().iterator().next(); - - String type = entry.getKey(); - Map parsedMappings = MapperService.parseMapping(xContentRegistry, entry.getValue()); - result.add(singletonMap(type, parsedMappings)); - } + Map parsedRequestMappings = MapperService.parseMapping(xContentRegistry, requestMappings); + result.add(parsedRequestMappings); return result; } @@ -696,7 +684,8 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( ) throws Exception { logger.info("applying create index request using existing index [{}] metadata", sourceMetadata.getIndex().getName()); - if (request.mappings().size() > 0) { + final Map mappings = MapperService.parseMapping(xContentRegistry, request.mappings()); + if (mappings.isEmpty() == false) { throw new IllegalArgumentException( "mappings are not allowed when creating an index from a source index, " + "all 
mappings are copied from the source index" ); @@ -721,7 +710,7 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( silent, sourceMetadata, tmpImd, - Collections.emptyList(), + Collections.singletonList(mappings), indexService -> resolveAndValidateAliases( request.index(), request.aliases(), @@ -747,55 +736,28 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( * {@link IndexTemplateMetadata#order()}). This merging makes no distinction between field * definitions, as may result in an invalid field definition */ - static Map> parseV1Mappings( - Map requestMappings, - List> templateMappings, + static Map parseV1Mappings( + String requestMappings, + List templateMappings, NamedXContentRegistry xContentRegistry ) throws Exception { - Map> mappings = new HashMap<>(); - for (Map.Entry entry : requestMappings.entrySet()) { - Map mapping = MapperService.parseMapping(xContentRegistry, entry.getValue()); - if (mapping.isEmpty()) { - // Someone provided an empty '{}' for mappings, which is okay, but to avoid - // tripping the below assertion, we can safely ignore it - continue; - } - assert mapping.size() == 1 : mapping; - assert entry.getKey().equals(mapping.keySet().iterator().next()) : entry.getKey() + " != " + mapping; - mappings.put(entry.getKey(), mapping); - } - + Map mappings = MapperService.parseMapping(xContentRegistry, requestMappings); // apply templates, merging the mappings into the request mapping if exists - for (Map tMapping : templateMappings) { - for (Map.Entry cursor : tMapping.entrySet()) { - String mappingString = cursor.getValue().string(); - String type = cursor.getKey(); - if (mappings.containsKey(type)) { - XContentHelper.mergeDefaults(mappings.get(type), MapperService.parseMapping(xContentRegistry, mappingString)); - } else if (mappings.size() == 1 && type.equals(MapperService.SINGLE_MAPPING_NAME)) { - // Typeless template with typed mapping - Map templateMapping = MapperService.parseMapping(xContentRegistry, 
mappingString); - assert templateMapping.size() == 1 : templateMapping; - assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping; - Map.Entry> mappingEntry = mappings.entrySet().iterator().next(); - templateMapping = singletonMap( - mappingEntry.getKey(), // reuse type name from the mapping - templateMapping.values().iterator().next() - ); // but actual mappings from the template - XContentHelper.mergeDefaults(mappingEntry.getValue(), templateMapping); - } else if (tMapping.size() == 1 && mappings.containsKey(MapperService.SINGLE_MAPPING_NAME)) { - // Typed template with typeless mapping - Map templateMapping = MapperService.parseMapping(xContentRegistry, mappingString); - assert templateMapping.size() == 1 : templateMapping; - assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping; - Map mapping = mappings.get(MapperService.SINGLE_MAPPING_NAME); - templateMapping = singletonMap( - MapperService.SINGLE_MAPPING_NAME, // make template mapping typeless - templateMapping.values().iterator().next() - ); - XContentHelper.mergeDefaults(mapping, templateMapping); + for (CompressedXContent mapping : templateMappings) { + if (mapping != null) { + Map templateMapping = MapperService.parseMapping(xContentRegistry, mapping.string()); + if (templateMapping.isEmpty()) { + // Someone provided an empty '{}' for mappings, which is okay, but to avoid + // tripping the below assertion, we can safely ignore it + continue; + } + assert templateMapping.size() == 1 : "expected exactly one mapping value, got: " + templateMapping; + // pre-8x templates may have a wrapper type other than _doc, so we re-wrap things here + templateMapping = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, templateMapping.values().iterator().next()); + if (mappings.isEmpty()) { + mappings = templateMapping; } else { - mappings.put(type, MapperService.parseMapping(xContentRegistry, mappingString)); + 
XContentHelper.mergeDefaults(mappings, templateMapping); } } } @@ -986,9 +948,8 @@ static int getIndexNumberOfRoutingShards(Settings indexSettings, @Nullable Index routingNumShards = calculateNumRoutingShards(numTargetShards, indexVersionCreated); } } else { - assert IndexMetadata.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.exists( - indexSettings - ) == false : "index.number_of_routing_shards should not be present on the target index on resize"; + assert IndexMetadata.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.exists(indexSettings) == false + : "index.number_of_routing_shards should not be present on the target index on resize"; routingNumShards = sourceMetadata.getRoutingNumShards(); } return routingNumShards; @@ -1100,7 +1061,6 @@ static IndexMetadata buildIndexMetadata( String indexName, List aliases, Supplier documentMapperSupplier, - Supplier defaultDocumentMapperSupplier, Settings indexSettings, int routingNumShards, @Nullable IndexMetadata sourceMetadata, @@ -1110,11 +1070,10 @@ static IndexMetadata buildIndexMetadata( indexMetadataBuilder.system(isSystem); // now, update the mappings with the actual source Map mappingsMetadata = new HashMap<>(); - for (DocumentMapper mapper : Arrays.asList(documentMapperSupplier.get(), defaultDocumentMapperSupplier.get())) { - if (mapper != null) { - MappingMetadata mappingMd = new MappingMetadata(mapper); - mappingsMetadata.put(mapper.type(), mappingMd); - } + DocumentMapper mapper = documentMapperSupplier.get(); + if (mapper != null) { + MappingMetadata mappingMd = new MappingMetadata(mapper); + mappingsMetadata.put(mapper.type(), mappingMd); } for (MappingMetadata mappingMd : mappingsMetadata.values()) { @@ -1175,15 +1134,13 @@ private static ClusterBlocks.Builder createClusterBlocksBuilder(ClusterState cur private static void updateIndexMappingsAndBuildSortOrder( IndexService indexService, CreateIndexClusterStateUpdateRequest request, - List>> mappings, + List> mappings, @Nullable IndexMetadata sourceMetadata ) throws 
IOException { MapperService mapperService = indexService.mapperService(); - for (Map> mapping : mappings) { - if (!mapping.isEmpty()) { - assert mapping.size() == 1 : mapping; - Map.Entry> entry = mapping.entrySet().iterator().next(); - mapperService.merge(entry.getKey(), entry.getValue(), MergeReason.INDEX_TEMPLATE); + for (Map mapping : mappings) { + if (mapping.isEmpty() == false) { + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MergeReason.INDEX_TEMPLATE); } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java index eb9508781393f..22cd5c1dbbbe2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java @@ -932,6 +932,11 @@ static ClusterState innerPutTemplate( templateBuilder.putAlias(aliasMetadata); } IndexTemplateMetadata template = templateBuilder.build(); + IndexTemplateMetadata existingTemplate = currentState.metadata().templates().get(request.name); + if (template.equals(existingTemplate)) { + // The template is unchanged, therefore there is no need for a cluster state update + return currentState; + } Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(template); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java index 9ed2a0f9257fc..3795961d39143 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java @@ -57,17 +57,14 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.MapperService.MergeReason; import org.opensearch.indices.IndicesService; -import 
org.opensearch.indices.InvalidTypeNameException; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.opensearch.index.mapper.MapperService.isMappingSourceTyped; import static org.opensearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.NO_LONGER_ASSIGNED; /** @@ -188,31 +185,11 @@ private boolean refreshIndexMapping(IndexService indexService, IndexMetadata.Bui boolean dirty = false; String index = indexService.index().getName(); try { - List updatedTypes = new ArrayList<>(); MapperService mapperService = indexService.mapperService(); - for (DocumentMapper mapper : Arrays.asList( - mapperService.documentMapper(), - mapperService.documentMapper(MapperService.DEFAULT_MAPPING) - )) { - if (mapper != null) { - final String type = mapper.type(); - if (!mapper.mappingSource().equals(builder.mapping(type).source())) { - updatedTypes.add(type); - } - } - } - - // if a single type is not up-to-date, re-send everything - if (updatedTypes.isEmpty() == false) { - logger.warn("[{}] re-syncing mappings with cluster state because of types [{}]", index, updatedTypes); - dirty = true; - for (DocumentMapper mapper : Arrays.asList( - mapperService.documentMapper(), - mapperService.documentMapper(MapperService.DEFAULT_MAPPING) - )) { - if (mapper != null) { - builder.putMapping(new MappingMetadata(mapper)); - } + DocumentMapper mapper = mapperService.documentMapper(); + if (mapper != null) { + if (mapper.mappingSource().equals(builder.mapping().source()) == false) { + dirty = true; } } } catch (Exception e) { @@ -272,7 +249,6 @@ private ClusterState applyRequest( PutMappingClusterStateUpdateRequest request, Map indexMapperServices ) throws IOException { - String mappingType = request.type(); CompressedXContent mappingUpdateSource = new CompressedXContent(request.source()); final Metadata metadata = 
currentState.metadata(); final List updateList = new ArrayList<>(); @@ -286,43 +262,12 @@ private ClusterState applyRequest( // we used for the validation, it makes this mechanism little less scary (a little) updateList.add(indexMetadata); // try and parse it (no need to add it here) so we can bail early in case of parsing exception - DocumentMapper newMapper; DocumentMapper existingMapper = mapperService.documentMapper(); - - String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource); - if (existingMapper != null && existingMapper.type().equals(typeForUpdate) == false) { - throw new IllegalArgumentException( - "Rejecting mapping update to [" - + mapperService.index().getName() - + "] as the final mapping would have more than 1 type: " - + Arrays.asList(existingMapper.type(), typeForUpdate) - ); - } - - if (MapperService.DEFAULT_MAPPING.equals(request.type())) { - // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default - newMapper = mapperService.parse(request.type(), mappingUpdateSource, false); - } else { - newMapper = mapperService.parse(request.type(), mappingUpdateSource, existingMapper == null); - if (existingMapper != null) { - // first, simulate: just call merge and ignore the result - existingMapper.merge(newMapper.mapping(), MergeReason.MAPPING_UPDATE); - } + DocumentMapper newMapper = mapperService.parse(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource); + if (existingMapper != null) { + // first, simulate: just call merge and ignore the result + existingMapper.merge(newMapper.mapping(), MergeReason.MAPPING_UPDATE); } - if (mappingType == null) { - mappingType = newMapper.type(); - } else if (mappingType.equals(newMapper.type()) == false - && (isMappingSourceTyped(request.type(), mappingUpdateSource) - || mapperService.resolveDocumentType(mappingType).equals(newMapper.type()) == false)) { - throw new InvalidTypeNameException("Type name provided does not match type 
name within mapping definition."); - } - } - assert mappingType != null; - - if (MapperService.DEFAULT_MAPPING.equals(mappingType) == false - && MapperService.SINGLE_MAPPING_NAME.equals(mappingType) == false - && mappingType.charAt(0) == '_') { - throw new InvalidTypeNameException("Document mapping type name can't start with '_', found: [" + mappingType + "]"); } Metadata.Builder builder = Metadata.builder(metadata); boolean updated = false; @@ -333,13 +278,16 @@ private ClusterState applyRequest( final Index index = indexMetadata.getIndex(); final MapperService mapperService = indexMapperServices.get(index); - String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource); CompressedXContent existingSource = null; - DocumentMapper existingMapper = mapperService.documentMapper(typeForUpdate); + DocumentMapper existingMapper = mapperService.documentMapper(); if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = mapperService.merge(typeForUpdate, mappingUpdateSource, MergeReason.MAPPING_UPDATE); + DocumentMapper mergedMapper = mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + mappingUpdateSource, + MergeReason.MAPPING_UPDATE + ); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { @@ -358,22 +306,18 @@ private ClusterState applyRequest( } else { updatedMapping = true; if (logger.isDebugEnabled()) { - logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource); + logger.debug("{} create_mapping with source [{}]", index, updatedSource); } else if (logger.isInfoEnabled()) { - logger.info("{} create_mapping [{}]", index, mappingType); + logger.info("{} create_mapping", index); } } IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexMetadata); // Mapping updates on a single type may have side-effects on other types so we need to // update mapping metadata on all types - for 
(DocumentMapper mapper : Arrays.asList( - mapperService.documentMapper(), - mapperService.documentMapper(MapperService.DEFAULT_MAPPING) - )) { - if (mapper != null) { - indexMetadataBuilder.putMapping(new MappingMetadata(mapper.mappingSource())); - } + DocumentMapper mapper = mapperService.documentMapper(); + if (mapper != null) { + indexMetadataBuilder.putMapping(new MappingMetadata(mapper.mappingSource())); } if (updatedMapping) { indexMetadataBuilder.mappingVersion(1 + indexMetadataBuilder.mappingVersion()); @@ -392,11 +336,6 @@ private ClusterState applyRequest( return currentState; } } - - @Override - public String describeTasks(List tasks) { - return String.join(", ", tasks.stream().map(t -> (CharSequence) t.type())::iterator); - } } public void putMapping(final PutMappingClusterStateUpdateRequest request, final ActionListener listener) { diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java index b5353382f06b8..2906ba783a5a5 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java @@ -599,13 +599,11 @@ public void failShard( ensureMutable(); assert failedShard.assignedToNode() : "only assigned shards can be failed"; assert indexMetadata.getIndex().equals(failedShard.index()) : "shard failed for unknown index (shard entry: " + failedShard + ")"; - assert getByAllocationId( - failedShard.shardId(), - failedShard.allocationId().getId() - ) == failedShard : "shard routing to fail does not exist in routing table, expected: " - + failedShard - + " but was: " - + getByAllocationId(failedShard.shardId(), failedShard.allocationId().getId()); + assert getByAllocationId(failedShard.shardId(), failedShard.allocationId().getId()) == failedShard + : "shard routing to fail does not exist in routing table, expected: " + + failedShard + + " but was: " + + 
getByAllocationId(failedShard.shardId(), failedShard.allocationId().getId()); logger.debug("{} failing shard {} with unassigned info ({})", failedShard.shardId(), failedShard, unassignedInfo.shortSummary()); @@ -850,12 +848,8 @@ private void updateAssigned(ShardRouting oldShard, ShardRouting newShard) { + oldShard + " by shard with same shard id but was " + newShard; - assert oldShard.unassigned() == false - && newShard.unassigned() == false : "only assigned shards can be updated in list of assigned shards (prev: " - + oldShard - + ", new: " - + newShard - + ")"; + assert oldShard.unassigned() == false && newShard.unassigned() == false + : "only assigned shards can be updated in list of assigned shards (prev: " + oldShard + ", new: " + newShard + ")"; assert oldShard.currentNodeId().equals(newShard.currentNodeId()) : "shard to update " + oldShard + " can only update " diff --git a/server/src/main/java/org/opensearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/opensearch/cluster/routing/ShardRouting.java index 84c5a6e26d6b9..36c7545c16c5c 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/opensearch/cluster/routing/ShardRouting.java @@ -101,16 +101,13 @@ public final class ShardRouting implements Writeable, ToXContentObject { assert expectedShardSize == UNAVAILABLE_EXPECTED_SHARD_SIZE || state == ShardRoutingState.INITIALIZING || state == ShardRoutingState.RELOCATING : expectedShardSize + " state: " + state; - assert expectedShardSize >= 0 - || state != ShardRoutingState.INITIALIZING - || state != ShardRoutingState.RELOCATING : expectedShardSize + " state: " + state; + assert expectedShardSize >= 0 || state != ShardRoutingState.INITIALIZING || state != ShardRoutingState.RELOCATING + : expectedShardSize + " state: " + state; assert !(state == ShardRoutingState.UNASSIGNED && unassignedInfo == null) : "unassigned shard must be created with meta"; - assert (state == 
ShardRoutingState.UNASSIGNED - || state == ShardRoutingState.INITIALIZING) == (recoverySource != null) : "recovery source only available on unassigned or initializing shard but was " - + state; - assert recoverySource == null - || recoverySource == PeerRecoverySource.INSTANCE - || primary : "replica shards always recover from primary"; + assert (state == ShardRoutingState.UNASSIGNED || state == ShardRoutingState.INITIALIZING) == (recoverySource != null) + : "recovery source only available on unassigned or initializing shard but was " + state; + assert recoverySource == null || recoverySource == PeerRecoverySource.INSTANCE || primary + : "replica shards always recover from primary"; assert (currentNodeId == null) == (state == ShardRoutingState.UNASSIGNED) : "unassigned shard must not be assigned to a node " + this; } @@ -589,12 +586,8 @@ public ShardRouting moveUnassignedFromPrimary() { **/ public boolean isSameAllocation(ShardRouting other) { boolean b = this.allocationId != null && other.allocationId != null && this.allocationId.getId().equals(other.allocationId.getId()); - assert b == false - || this.currentNodeId.equals(other.currentNodeId) : "ShardRoutings have the same allocation id but not the same node. This [" - + this - + "], other [" - + other - + "]"; + assert b == false || this.currentNodeId.equals(other.currentNodeId) + : "ShardRoutings have the same allocation id but not the same node. This [" + this + "], other [" + other + "]"; return b; } @@ -613,50 +606,35 @@ public boolean isRelocationTargetOf(ShardRouting other) { && this.state == ShardRoutingState.INITIALIZING && this.allocationId.getId().equals(other.allocationId.getRelocationId()); - assert b == false - || other.state == ShardRoutingState.RELOCATING : "ShardRouting is a relocation target but the source shard state isn't relocating. This [" + assert b == false || other.state == ShardRoutingState.RELOCATING + : "ShardRouting is a relocation target but the source shard state isn't relocating. 
This [" + this + "], other [" + other + "]"; + + assert b == false || other.allocationId.getId().equals(this.allocationId.getRelocationId()) + : "ShardRouting is a relocation target but the source id isn't equal to source's allocationId.getRelocationId." + + " This [" + + this + + "], other [" + + other + + "]"; + + assert b == false || other.currentNodeId().equals(this.relocatingNodeId) + : "ShardRouting is a relocation target but source current node id isn't equal to target relocating node." + + " This [" + + this + + "], other [" + + other + + "]"; + + assert b == false || this.currentNodeId().equals(other.relocatingNodeId) + : "ShardRouting is a relocation target but current node id isn't equal to source relocating node." + + " This [" + this + "], other [" + other + "]"; - assert b == false - || other.allocationId.getId() - .equals( - this.allocationId.getRelocationId() - ) : "ShardRouting is a relocation target but the source id isn't equal to source's allocationId.getRelocationId." - + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || other.currentNodeId() - .equals( - this.relocatingNodeId - ) : "ShardRouting is a relocation target but source current node id isn't equal to target relocating node." - + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || this.currentNodeId() - .equals( - other.relocatingNodeId - ) : "ShardRouting is a relocation target but current node id isn't equal to source relocating node." - + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || this.shardId.equals( - other.shardId - ) : "ShardRouting is a relocation target but both indexRoutings are not of the same shard id." + assert b == false || this.shardId.equals(other.shardId) + : "ShardRouting is a relocation target but both indexRoutings are not of the same shard id." 
+ " This [" + this + "], other [" @@ -680,48 +658,35 @@ public boolean isRelocationSourceOf(ShardRouting other) { && other.state == ShardRoutingState.INITIALIZING && other.allocationId.getId().equals(this.allocationId.getRelocationId()); - assert b == false - || this.state == ShardRoutingState.RELOCATING : "ShardRouting is a relocation source but shard state isn't relocating. This [" + assert b == false || this.state == ShardRoutingState.RELOCATING + : "ShardRouting is a relocation source but shard state isn't relocating. This [" + this + "], other [" + other + "]"; + + assert b == false || this.allocationId.getId().equals(other.allocationId.getRelocationId()) + : "ShardRouting is a relocation source but the allocation id isn't equal to other.allocationId.getRelocationId." + + " This [" + + this + + "], other [" + + other + + "]"; + + assert b == false || this.currentNodeId().equals(other.relocatingNodeId) + : "ShardRouting is a relocation source but current node isn't equal to other's relocating node." + + " This [" + + this + + "], other [" + + other + + "]"; + + assert b == false || other.currentNodeId().equals(this.relocatingNodeId) + : "ShardRouting is a relocation source but relocating node isn't equal to other's current node." + + " This [" + this + "], other [" + other + "]"; - assert b == false - || this.allocationId.getId() - .equals( - other.allocationId.getRelocationId() - ) : "ShardRouting is a relocation source but the allocation id isn't equal to other.allocationId.getRelocationId." - + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || this.currentNodeId() - .equals( - other.relocatingNodeId - ) : "ShardRouting is a relocation source but current node isn't equal to other's relocating node." 
- + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || other.currentNodeId() - .equals( - this.relocatingNodeId - ) : "ShardRouting is a relocation source but relocating node isn't equal to other's current node." - + " This [" - + this - + "], other [" - + other - + "]"; - - assert b == false - || this.shardId.equals(other.shardId) : "ShardRouting is a relocation source but both indexRoutings are not of the same shard." + assert b == false || this.shardId.equals(other.shardId) + : "ShardRouting is a relocation source but both indexRoutings are not of the same shard." + " This [" + this + "], target [" diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/AllocationService.java index 1680c13a72e0e..53ade0d6c5ae2 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/AllocationService.java @@ -528,8 +528,8 @@ private boolean hasDeadNodes(RoutingAllocation allocation) { private void reroute(RoutingAllocation allocation) { assert hasDeadNodes(allocation) == false : "dead nodes should be explicitly cleaned up. 
See disassociateDeadNodes"; - assert AutoExpandReplicas.getAutoExpandReplicaChanges(allocation.metadata(), allocation) - .isEmpty() : "auto-expand replicas out of sync with number of nodes in the cluster"; + assert AutoExpandReplicas.getAutoExpandReplicaChanges(allocation.metadata(), allocation).isEmpty() + : "auto-expand replicas out of sync with number of nodes in the cluster"; assert assertInitialized(); removeDelayMarkers(allocation); @@ -602,15 +602,13 @@ private void applyStartedShards(RoutingAllocation routingAllocation, List oldInSyncAllocationIds = oldIndexMetadata.inSyncAllocationIds(shardId.id()); @@ -217,9 +213,8 @@ private IndexMetadata.Builder updateInSyncAllocations( inSyncAllocationIds.removeAll(updates.removedAllocationIds); assert oldInSyncAllocationIds.contains(RecoverySource.ExistingStoreRecoverySource.FORCED_ALLOCATION_ID) == false - || inSyncAllocationIds.contains( - RecoverySource.ExistingStoreRecoverySource.FORCED_ALLOCATION_ID - ) == false : "fake allocation id has to be removed, inSyncAllocationIds:" + inSyncAllocationIds; + || inSyncAllocationIds.contains(RecoverySource.ExistingStoreRecoverySource.FORCED_ALLOCATION_ID) == false + : "fake allocation id has to be removed, inSyncAllocationIds:" + inSyncAllocationIds; // Prevent set of inSyncAllocationIds to grow unboundedly. This can happen for example if we don't write to a primary // but repeatedly shut down nodes that have active replicas. 
@@ -258,9 +253,8 @@ private IndexMetadata.Builder updateInSyncAllocations( inSyncAllocationIds.add(updates.firstFailedPrimary.allocationId().getId()); } - assert inSyncAllocationIds.isEmpty() == false - || oldInSyncAllocationIds.isEmpty() : "in-sync allocations cannot become empty after they have been non-empty: " - + oldInSyncAllocationIds; + assert inSyncAllocationIds.isEmpty() == false || oldInSyncAllocationIds.isEmpty() + : "in-sync allocations cannot become empty after they have been non-empty: " + oldInSyncAllocationIds; // be extra safe here and only update in-sync set if it is non-empty if (inSyncAllocationIds.isEmpty() == false) { @@ -295,11 +289,8 @@ public static ClusterState removeStaleIdsWithoutRoutings(ClusterState clusterSta int shardNumber = shardEntry.getKey().getId(); Set oldInSyncAllocations = oldIndexMetadata.inSyncAllocationIds(shardNumber); Set idsToRemove = shardEntry.getValue().stream().map(e -> e.getAllocationId()).collect(Collectors.toSet()); - assert idsToRemove.stream() - .allMatch(id -> oldRoutingTable.getByAllocationId(shardEntry.getKey(), id) == null) : "removing stale ids: " - + idsToRemove - + ", some of which have still a routing entry: " - + oldRoutingTable; + assert idsToRemove.stream().allMatch(id -> oldRoutingTable.getByAllocationId(shardEntry.getKey(), id) == null) + : "removing stale ids: " + idsToRemove + ", some of which have still a routing entry: " + oldRoutingTable; Set remainingInSyncAllocations = Sets.difference(oldInSyncAllocations, idsToRemove); assert remainingInSyncAllocations.isEmpty() == false : "Set of in-sync ids cannot become empty for shard " + shardEntry.getKey() diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/RoutingNodesChangedObserver.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/RoutingNodesChangedObserver.java index 411b862312845..7be75d5baf0b2 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/RoutingNodesChangedObserver.java 
+++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/RoutingNodesChangedObserver.java @@ -91,9 +91,8 @@ public void relocationCompleted(ShardRouting removedRelocationSource) { @Override public void relocationSourceRemoved(ShardRouting removedReplicaRelocationSource) { - assert removedReplicaRelocationSource.primary() == false - && removedReplicaRelocationSource.isRelocationTarget() : "expected replica relocation target shard " - + removedReplicaRelocationSource; + assert removedReplicaRelocationSource.primary() == false && removedReplicaRelocationSource.isRelocationTarget() + : "expected replica relocation target shard " + removedReplicaRelocationSource; setChanged(); } @@ -108,11 +107,8 @@ public void initializedReplicaReinitialized(ShardRouting oldReplica, ShardRoutin assert oldReplica.initializing() && oldReplica.primary() == false : "expected initializing replica shard " + oldReplica; assert reinitializedReplica.initializing() && reinitializedReplica.primary() == false : "expected reinitialized replica shard " + reinitializedReplica; - assert oldReplica.allocationId() - .getId() - .equals( - reinitializedReplica.allocationId().getId() - ) == false : "expected allocation id to change for reinitialized replica shard (old: " + assert oldReplica.allocationId().getId().equals(reinitializedReplica.allocationId().getId()) == false + : "expected allocation id to change for reinitialized replica shard (old: " + oldReplica + " new: " + reinitializedReplica diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java index bb51d9bc312ac..46d65f310a427 100644 --- a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java @@ -237,9 +237,8 @@ public ClusterApplierService getClusterApplierService() { public static boolean assertClusterOrMasterStateThread() { assert 
Thread.currentThread().getName().contains(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME) - || Thread.currentThread() - .getName() - .contains(MasterService.MASTER_UPDATE_THREAD_NAME) : "not called from the master/cluster state update thread"; + || Thread.currentThread().getName().contains(MasterService.MASTER_UPDATE_THREAD_NAME) + : "not called from the master/cluster state update thread"; return true; } diff --git a/server/src/main/java/org/opensearch/cluster/service/MasterService.java b/server/src/main/java/org/opensearch/cluster/service/MasterService.java index b971e8463bda9..7b0bede4c6c76 100644 --- a/server/src/main/java/org/opensearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/opensearch/cluster/service/MasterService.java @@ -539,8 +539,9 @@ void notifySuccessfulTasksOnUnchangedClusterState() { */ public List pendingTasks() { return Arrays.stream(threadPoolExecutor.getPending()).map(pending -> { - assert pending.task instanceof SourcePrioritizedRunnable : "thread pool executor should only use SourcePrioritizedRunnable instances but found: " - + pending.task.getClass().getName(); + assert pending.task instanceof SourcePrioritizedRunnable + : "thread pool executor should only use SourcePrioritizedRunnable instances but found: " + + pending.task.getClass().getName(); SourcePrioritizedRunnable task = (SourcePrioritizedRunnable) pending.task; return new PendingClusterTask( pending.insertionOrder, diff --git a/server/src/main/java/org/opensearch/cluster/service/TaskBatcher.java b/server/src/main/java/org/opensearch/cluster/service/TaskBatcher.java index bc3fc11d631da..a0648e0d9ab12 100644 --- a/server/src/main/java/org/opensearch/cluster/service/TaskBatcher.java +++ b/server/src/main/java/org/opensearch/cluster/service/TaskBatcher.java @@ -71,9 +71,8 @@ public void submitTasks(List tasks, @Nullable TimeValue t return; } final BatchedTask firstTask = tasks.get(0); - assert tasks.stream() - .allMatch(t -> t.batchingKey == 
firstTask.batchingKey) : "tasks submitted in a batch should share the same batching key: " - + tasks; + assert tasks.stream().allMatch(t -> t.batchingKey == firstTask.batchingKey) + : "tasks submitted in a batch should share the same batching key: " + tasks; // convert to an identity map to check for dups based on task identity final Map tasksIdentity = tasks.stream() .collect( @@ -124,8 +123,8 @@ private void onTimeoutInternal(List tasks, TimeValue time if (toRemove.isEmpty() == false) { BatchedTask firstTask = toRemove.get(0); Object batchingKey = firstTask.batchingKey; - assert tasks.stream() - .allMatch(t -> t.batchingKey == batchingKey) : "tasks submitted in a batch should share the same batching key: " + tasks; + assert tasks.stream().allMatch(t -> t.batchingKey == batchingKey) + : "tasks submitted in a batch should share the same batching key: " + tasks; synchronized (tasksPerBatchingKey) { LinkedHashSet existingTasks = tasksPerBatchingKey.get(batchingKey); if (existingTasks != null) { diff --git a/server/src/main/java/org/opensearch/common/LocalTimeOffset.java b/server/src/main/java/org/opensearch/common/LocalTimeOffset.java index d07b13ba0e123..94347c47e56e0 100644 --- a/server/src/main/java/org/opensearch/common/LocalTimeOffset.java +++ b/server/src/main/java/org/opensearch/common/LocalTimeOffset.java @@ -570,12 +570,8 @@ protected static Transition buildTransition(ZoneOffsetTransition transition, Loc long utcStart = transition.toEpochSecond() * 1000; long offsetBeforeMillis = transition.getOffsetBefore().getTotalSeconds() * 1000; long offsetAfterMillis = transition.getOffsetAfter().getTotalSeconds() * 1000; - assert (false == previous instanceof Transition) - || ((Transition) previous).startUtcMillis < utcStart : "transition list out of order at [" - + previous - + "] and [" - + transition - + "]"; + assert (false == previous instanceof Transition) || ((Transition) previous).startUtcMillis < utcStart + : "transition list out of order at [" + previous + 
"] and [" + transition + "]"; assert previous.millis != offsetAfterMillis : "transition list is has a duplicate at [" + previous + "] and [" diff --git a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java index e9466b47c3d5b..9ed47ef6cbf39 100644 --- a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java +++ b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java @@ -34,9 +34,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; +import org.opensearch.common.concurrent.RefCountedReleasable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; import org.opensearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -50,14 +50,14 @@ public final class ReleasableBytesReference implements Releasable, BytesReferenc public static final Releasable NO_OP = () -> {}; private final BytesReference delegate; - private final AbstractRefCounted refCounted; + private final RefCountedReleasable refCounted; public ReleasableBytesReference(BytesReference delegate, Releasable releasable) { this.delegate = delegate; - this.refCounted = new RefCountedReleasable(releasable); + this.refCounted = new RefCountedReleasable<>("bytes-reference", releasable, releasable::close); } - private ReleasableBytesReference(BytesReference delegate, AbstractRefCounted refCounted) { + private ReleasableBytesReference(BytesReference delegate, RefCountedReleasable refCounted) { this.delegate = delegate; this.refCounted = refCounted; refCounted.incRef(); @@ -82,7 +82,7 @@ public ReleasableBytesReference retainedSlice(int from, int length) { @Override public void close() { - refCounted.decRef(); + refCounted.close(); } @Override @@ -164,19 +164,4 @@ public boolean equals(Object obj) { public 
int hashCode() { return delegate.hashCode(); } - - private static final class RefCountedReleasable extends AbstractRefCounted { - - private final Releasable releasable; - - RefCountedReleasable(Releasable releasable) { - super("bytes-reference"); - this.releasable = releasable; - } - - @Override - protected void closeInternal() { - releasable.close(); - } - } } diff --git a/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java b/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java index e883b3739c9da..f15e213b9a773 100644 --- a/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java +++ b/server/src/main/java/org/opensearch/common/compress/CompressedXContent.java @@ -41,7 +41,6 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import java.io.IOException; import java.io.OutputStream; @@ -82,15 +81,19 @@ private CompressedXContent(byte[] compressed, int crc32) { /** * Create a {@link CompressedXContent} out of a {@link ToXContent} instance. 
*/ - public CompressedXContent(ToXContent xcontent, XContentType type, ToXContent.Params params) throws IOException { + public CompressedXContent(ToXContent xcontent, ToXContent.Params params) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); OutputStream compressedStream = CompressorFactory.COMPRESSOR.threadLocalOutputStream(bStream); CRC32 crc32 = new CRC32(); OutputStream checkedStream = new CheckedOutputStream(compressedStream, crc32); - try (XContentBuilder builder = XContentFactory.contentBuilder(type, checkedStream)) { - builder.startObject(); + try (XContentBuilder builder = XContentFactory.jsonBuilder(checkedStream)) { + if (xcontent.isFragment()) { + builder.startObject(); + } xcontent.toXContent(builder, params); - builder.endObject(); + if (xcontent.isFragment()) { + builder.endObject(); + } } this.bytes = BytesReference.toBytes(bStream.bytes()); this.crc32 = (int) crc32.getValue(); diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java new file mode 100644 index 0000000000000..cb819c0320e91 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +/** + * Decorator class that wraps an object reference with a {@link Runnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. 
See also {@link GatedCloseable} + */ +public class GatedAutoCloseable implements AutoCloseable { + + private final T ref; + private final Runnable onClose; + private final OneWayGate gate; + + public GatedAutoCloseable(T ref, Runnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java new file mode 100644 index 0000000000000..d98e4cca8d561 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.CheckedRunnable; + +import java.io.Closeable; +import java.io.IOException; + +/** + * Decorator class that wraps an object reference with a {@link CheckedRunnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. 
See also {@link GatedAutoCloseable} + */ +public class GatedCloseable implements Closeable { + + private final T ref; + private final CheckedRunnable onClose; + private final OneWayGate gate; + + public GatedCloseable(T ref, CheckedRunnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() throws IOException { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java new file mode 100644 index 0000000000000..76625094f3ca6 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Encapsulates logic for a one-way gate. Guarantees idempotency via the {@link AtomicBoolean} instance + * and the return value of the {@link #close()} function. + */ +public class OneWayGate { + + private final AtomicBoolean closed = new AtomicBoolean(); + + /** + * Closes the gate and sets the internal boolean value in an idempotent + * fashion. This is a one-way operation and cannot be reset. + * @return true if the gate was closed in this invocation, + * false if the gate was already closed + */ + public boolean close() { + return closed.compareAndSet(false, true); + } + + /** + * Indicates if the gate has been closed. 
+ * @return true if the gate is closed, false otherwise + */ + public boolean isClosed() { + return closed.get(); + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java new file mode 100644 index 0000000000000..975f2295d7c32 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.util.concurrent.AbstractRefCounted; + +/** + * Decorator class that wraps an object reference as a {@link AbstractRefCounted} instance. + * In addition to a {@link String} name, it accepts a {@link Runnable} shutdown hook that is + * invoked when the reference count reaches zero i.e. on {@link #closeInternal()}. 
+ */ +public class RefCountedReleasable extends AbstractRefCounted implements Releasable { + + private final T ref; + private final Runnable shutdownRunnable; + + public RefCountedReleasable(String name, T ref, Runnable shutdownRunnable) { + super(name); + this.ref = ref; + this.shutdownRunnable = shutdownRunnable; + } + + @Override + public void close() { + decRef(); + } + + public T get() { + return ref; + } + + @Override + protected void closeInternal() { + shutdownRunnable.run(); + } +} diff --git a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java index 948e1a4e6eb37..671123f2df767 100644 --- a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java @@ -293,7 +293,7 @@ private boolean isOkayDuplicate(Binding original, BindingImpl binding) { return false; } - // It's unfortunate that we have to maintain a blacklist of specific + // It's unfortunate that we have to maintain a denylist of specific // classes, but we can't easily block the whole package because of // all our unit tests. 
private static final Set> FORBIDDEN_TYPES = unmodifiableSet( diff --git a/server/src/main/java/org/opensearch/common/inject/InheritingState.java b/server/src/main/java/org/opensearch/common/inject/InheritingState.java index 3d821114ff4b4..70a2fb335cca5 100644 --- a/server/src/main/java/org/opensearch/common/inject/InheritingState.java +++ b/server/src/main/java/org/opensearch/common/inject/InheritingState.java @@ -61,7 +61,7 @@ class InheritingState implements State { private final Map, Scope> scopes = new HashMap<>(); private final List converters = new ArrayList<>(); private final List listenerBindings = new ArrayList<>(); - private WeakKeySet blacklistedKeys = new WeakKeySet(); + private WeakKeySet denylistedKeys = new WeakKeySet(); private final Object lock; InheritingState(State parent) { @@ -145,17 +145,17 @@ public List getTypeListenerBindings() { @Override public void blacklist(Key key) { parent.blacklist(key); - blacklistedKeys.add(key); + denylistedKeys.add(key); } @Override public boolean isBlacklisted(Key key) { - return blacklistedKeys.contains(key); + return denylistedKeys.contains(key); } @Override public void clearBlacklisted() { - blacklistedKeys = new WeakKeySet(); + denylistedKeys = new WeakKeySet(); } @Override diff --git a/server/src/main/java/org/opensearch/common/inject/State.java b/server/src/main/java/org/opensearch/common/inject/State.java index 497c7d4d51e08..6a69e9547d707 100644 --- a/server/src/main/java/org/opensearch/common/inject/State.java +++ b/server/src/main/java/org/opensearch/common/inject/State.java @@ -164,7 +164,7 @@ public Object lock() { /** * Forbids the corresponding injector from creating a binding to {@code key}. Child injectors - * blacklist their bound keys on their parent injectors to prevent just-in-time bindings on the + * denylist their bound keys on their parent injectors to prevent just-in-time bindings on the * parent injector that would conflict. 
*/ void blacklist(Key key); @@ -177,11 +177,11 @@ public Object lock() { /** * Returns the shared lock for all injector data. This is a low-granularity, high-contention lock - * to be used when reading mutable data (ie. just-in-time bindings, and binding blacklists). + * to be used when reading mutable data (ie. just-in-time bindings, and binding denylists). */ Object lock(); - // ES_GUICE: clean blacklist keys + // ES_GUICE: clean denylist keys void clearBlacklisted(); void makeAllBindingsToEagerSingletons(Injector injector); diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java index a015b24d73e5a..3233fc9f8cecc 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FieldValueFactorFunction.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -55,6 +56,8 @@ public class FieldValueFactorFunction extends ScoreFunction { private final String field; private final float boostFactor; private final Modifier modifier; + private final String functionName; + /** * Value used if the document is missing the field. 
*/ @@ -67,6 +70,17 @@ public FieldValueFactorFunction( Modifier modifierType, Double missing, IndexNumericFieldData indexFieldData + ) { + this(field, boostFactor, modifierType, missing, indexFieldData, null); + } + + public FieldValueFactorFunction( + String field, + float boostFactor, + Modifier modifierType, + Double missing, + IndexNumericFieldData indexFieldData, + @Nullable String functionName ) { super(CombineFunction.MULTIPLY); this.field = field; @@ -74,6 +88,7 @@ public FieldValueFactorFunction( this.modifier = modifierType; this.indexFieldData = indexFieldData; this.missing = missing; + this.functionName = functionName; } @Override @@ -127,7 +142,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE (float) score, String.format( Locale.ROOT, - "field value function: %s(doc['%s'].value%s * factor=%s)", + "field value function" + Functions.nameOrEmptyFunc(functionName) + ": %s(doc['%s'].value%s * factor=%s)", modifierStr, field, defaultStr, diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index 36ecf690862cc..f7b91db2e712f 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -46,6 +46,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -70,11 +71,28 @@ public class FunctionScoreQuery extends Query { public static class FilterScoreFunction extends ScoreFunction { public final Query filter; public final ScoreFunction function; + public final String queryName; + /** + * 
Creates a FilterScoreFunction with query and function. + * @param filter filter query + * @param function score function + */ public FilterScoreFunction(Query filter, ScoreFunction function) { + this(filter, function, null); + } + + /** + * Creates a FilterScoreFunction with query and function. + * @param filter filter query + * @param function score function + * @param queryName filter query name + */ + public FilterScoreFunction(Query filter, ScoreFunction function, @Nullable String queryName) { super(function.getDefaultScoreCombiner()); this.filter = filter; this.function = function; + this.queryName = queryName; } @Override @@ -93,12 +111,14 @@ protected boolean doEquals(ScoreFunction other) { return false; } FilterScoreFunction that = (FilterScoreFunction) other; - return Objects.equals(this.filter, that.filter) && Objects.equals(this.function, that.function); + return Objects.equals(this.filter, that.filter) + && Objects.equals(this.function, that.function) + && Objects.equals(this.queryName, that.queryName); } @Override protected int doHashCode() { - return Objects.hash(filter, function); + return Objects.hash(filter, function, queryName); } @Override @@ -107,7 +127,7 @@ protected ScoreFunction rewrite(IndexReader reader) throws IOException { if (newFilter == filter) { return this; } - return new FilterScoreFunction(newFilter, function); + return new FilterScoreFunction(newFilter, function, queryName); } @Override @@ -144,6 +164,7 @@ public static ScoreMode fromString(String scoreMode) { final float maxBoost; private final Float minScore; private final CombineFunction combineFunction; + private final String queryName; /** * Creates a FunctionScoreQuery without function. @@ -152,7 +173,18 @@ public static ScoreMode fromString(String scoreMode) { * @param maxBoost The maximum applicable boost. 
*/ public FunctionScoreQuery(Query subQuery, Float minScore, float maxBoost) { - this(subQuery, ScoreMode.FIRST, new ScoreFunction[0], CombineFunction.MULTIPLY, minScore, maxBoost); + this(subQuery, null, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery without function. + * @param subQuery The query to match. + * @param queryName filter query name + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery(Query subQuery, @Nullable String queryName, Float minScore, float maxBoost) { + this(subQuery, queryName, ScoreMode.FIRST, new ScoreFunction[0], CombineFunction.MULTIPLY, minScore, maxBoost); } /** @@ -161,7 +193,17 @@ public FunctionScoreQuery(Query subQuery, Float minScore, float maxBoost) { * @param function The {@link ScoreFunction} to apply. */ public FunctionScoreQuery(Query subQuery, ScoreFunction function) { - this(subQuery, function, CombineFunction.MULTIPLY, null, DEFAULT_MAX_BOOST); + this(subQuery, null, function); + } + + /** + * Creates a FunctionScoreQuery with a single {@link ScoreFunction} + * @param subQuery The query to match. + * @param queryName filter query name + * @param function The {@link ScoreFunction} to apply. + */ + public FunctionScoreQuery(Query subQuery, @Nullable String queryName, ScoreFunction function) { + this(subQuery, queryName, function, CombineFunction.MULTIPLY, null, DEFAULT_MAX_BOOST); } /** @@ -173,12 +215,53 @@ public FunctionScoreQuery(Query subQuery, ScoreFunction function) { * @param maxBoost The maximum applicable boost. 
*/ public FunctionScoreQuery(Query subQuery, ScoreFunction function, CombineFunction combineFunction, Float minScore, float maxBoost) { - this(subQuery, ScoreMode.FIRST, new ScoreFunction[] { function }, combineFunction, minScore, maxBoost); + this(subQuery, null, function, combineFunction, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery with a single function + * @param subQuery The query to match. + * @param queryName filter query name + * @param function The {@link ScoreFunction} to apply. + * @param combineFunction Defines how the query and function score should be applied. + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery( + Query subQuery, + @Nullable String queryName, + ScoreFunction function, + CombineFunction combineFunction, + Float minScore, + float maxBoost + ) { + this(subQuery, queryName, ScoreMode.FIRST, new ScoreFunction[] { function }, combineFunction, minScore, maxBoost); + } + + /** + * Creates a FunctionScoreQuery with multiple score functions + * @param subQuery The query to match. + * @param scoreMode Defines how the different score functions should be combined. + * @param functions The {@link ScoreFunction}s to apply. + * @param combineFunction Defines how the query and function score should be applied. + * @param minScore The minimum score to consider a document. + * @param maxBoost The maximum applicable boost. + */ + public FunctionScoreQuery( + Query subQuery, + ScoreMode scoreMode, + ScoreFunction[] functions, + CombineFunction combineFunction, + Float minScore, + float maxBoost + ) { + this(subQuery, null, scoreMode, functions, combineFunction, minScore, maxBoost); } /** * Creates a FunctionScoreQuery with multiple score functions * @param subQuery The query to match. + * @param queryName filter query name * @param scoreMode Defines how the different score functions should be combined. 
* @param functions The {@link ScoreFunction}s to apply. * @param combineFunction Defines how the query and function score should be applied. @@ -187,6 +270,7 @@ public FunctionScoreQuery(Query subQuery, ScoreFunction function, CombineFunctio */ public FunctionScoreQuery( Query subQuery, + @Nullable String queryName, ScoreMode scoreMode, ScoreFunction[] functions, CombineFunction combineFunction, @@ -197,6 +281,7 @@ public FunctionScoreQuery( throw new IllegalArgumentException("Score function should not be null"); } this.subQuery = subQuery; + this.queryName = queryName; this.scoreMode = scoreMode; this.functions = functions; this.maxBoost = maxBoost; @@ -240,7 +325,7 @@ public Query rewrite(IndexReader reader) throws IOException { needsRewrite |= (newFunctions[i] != functions[i]); } if (needsRewrite) { - return new FunctionScoreQuery(newQ, scoreMode, newFunctions, combineFunction, minScore, maxBoost); + return new FunctionScoreQuery(newQ, queryName, scoreMode, newFunctions, combineFunction, minScore, maxBoost); } return this; } @@ -332,8 +417,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - - Explanation expl = subQueryWeight.explain(context, doc); + Explanation expl = Functions.explainWithName(subQueryWeight.explain(context, doc), queryName); if (!expl.isMatch()) { return expl; } @@ -355,11 +439,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, expl); if (function instanceof FilterScoreFunction) { float factor = functionExplanation.getValue().floatValue(); - Query filterQuery = ((FilterScoreFunction) function).filter; + final FilterScoreFunction filterScoreFunction = (FilterScoreFunction) function; + Query filterQuery = filterScoreFunction.filter; Explanation filterExplanation = Explanation.match( factor, "function 
score, product of:", - Explanation.match(1.0f, "match filter: " + filterQuery.toString()), + Explanation.match( + 1.0f, + "match filter" + Functions.nameOrEmptyFunc(filterScoreFunction.queryName) + ": " + filterQuery.toString() + ), functionExplanation ); functionsExplanations.add(filterExplanation); @@ -543,11 +631,12 @@ public boolean equals(Object o) { && Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && Objects.equals(this.scoreMode, other.scoreMode) - && Arrays.equals(this.functions, other.functions); + && Arrays.equals(this.functions, other.functions) + && Objects.equals(this.queryName, other.queryName); } @Override public int hashCode() { - return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(functions)); + return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(functions), queryName); } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java b/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java new file mode 100644 index 0000000000000..a9de8ead31e2a --- /dev/null +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/Functions.java @@ -0,0 +1,66 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.lucene.search.function; + +import org.apache.lucene.search.Explanation; +import org.opensearch.common.Strings; +import org.opensearch.index.query.AbstractQueryBuilder; +import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; + +/** + * Helper utility class for functions + */ +public final class Functions { + private Functions() {} + + /** + * Return function name wrapped into brackets or empty string, for example: '(_name: func1)' + * @param functionName function name + * @return function name wrapped into brackets or empty string + */ + public static String nameOrEmptyFunc(final String functionName) { + if (!Strings.isNullOrEmpty(functionName)) { + return "(" + AbstractQueryBuilder.NAME_FIELD.getPreferredName() + ": " + functionName + ")"; + } else { + return ""; + } + } + + /** + * Return function name as an argument or empty string, for example: ', _name: func1' + * @param functionName function name + * @return function name as an argument or empty string + */ + public static String nameOrEmptyArg(final String functionName) { + if (!Strings.isNullOrEmpty(functionName)) { + return ", " + FunctionScoreQueryBuilder.NAME_FIELD.getPreferredName() + ": " + functionName; + } else { + return ""; + } + } + + /** + * Enrich explanation with query name + * @param explanation explanation + * @param queryName query name + * @return explanation enriched with query name + */ + public static Explanation explainWithName(Explanation explanation, String queryName) { + if (Strings.isNullOrEmpty(queryName)) { + return explanation; + } else { + final String description = explanation.getDescription() + " " + nameOrEmptyFunc(queryName); + if (explanation.isMatch()) { + return Explanation.match(explanation.getValue(), description, explanation.getDetails()); + } else { + return Explanation.noMatch(description, explanation.getDetails()); + } + } + } +} diff --git 
a/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java index 78df111393394..f4fcda47b0078 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/RandomScoreFunction.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.StringHelper; +import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.LeafFieldData; import org.opensearch.index.fielddata.SortedBinaryDocValues; @@ -50,6 +51,7 @@ public class RandomScoreFunction extends ScoreFunction { private final int originalSeed; private final int saltedSeed; private final IndexFieldData fieldData; + private final String functionName; /** * Creates a RandomScoreFunction. @@ -59,10 +61,23 @@ public class RandomScoreFunction extends ScoreFunction { * @param uidFieldData The field data for _uid to use for generating consistent random values for the same id */ public RandomScoreFunction(int seed, int salt, IndexFieldData uidFieldData) { + this(seed, salt, uidFieldData, null); + } + + /** + * Creates a RandomScoreFunction. 
+ * + * @param seed A seed for randomness + * @param salt A value to salt the seed with, ideally unique to the running node/index + * @param uidFieldData The field data for _uid to use for generating consistent random values for the same id + * @param functionName The function name + */ + public RandomScoreFunction(int seed, int salt, IndexFieldData uidFieldData, @Nullable String functionName) { super(CombineFunction.MULTIPLY); this.originalSeed = seed; this.saltedSeed = BitMixer.mix(seed, salt); this.fieldData = uidFieldData; + this.functionName = functionName; } @Override @@ -97,7 +112,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE String field = fieldData == null ? null : fieldData.getFieldName(); return Explanation.match( (float) score(docId, subQueryScore.getValue().floatValue()), - "random score function (seed: " + originalSeed + ", field: " + field + ")" + "random score function (seed: " + originalSeed + ", field: " + field + Functions.nameOrEmptyArg(functionName) + ")" ); } }; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java index 5ce50844b3dcc..3a7cc970908a5 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreFunction.java @@ -39,6 +39,7 @@ import org.opensearch.script.ScoreScript; import org.opensearch.script.Script; import org.opensearch.Version; +import org.opensearch.common.Nullable; import java.io.IOException; import java.util.Objects; @@ -67,14 +68,23 @@ public float score() { private final int shardId; private final String indexName; private final Version indexVersion; - - public ScriptScoreFunction(Script sScript, ScoreScript.LeafFactory script, String indexName, int shardId, Version indexVersion) { + private final String 
functionName; + + public ScriptScoreFunction( + Script sScript, + ScoreScript.LeafFactory script, + String indexName, + int shardId, + Version indexVersion, + @Nullable String functionName + ) { super(CombineFunction.REPLACE); this.sScript = sScript; this.script = script; this.indexName = indexName; this.shardId = shardId; this.indexVersion = indexVersion; + this.functionName = functionName; } @Override @@ -105,11 +115,15 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE leafScript.setDocument(docId); scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); - exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); + exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore, functionName); } else { double score = score(docId, subQueryScore.getValue().floatValue()); // info about params already included in sScript - String explanation = "script score function, computed with script:\"" + sScript + "\""; + String explanation = "script score function" + + Functions.nameOrEmptyFunc(functionName) + + ", computed with script:\"" + + sScript + + "\""; Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore); return Explanation.match((float) score, explanation, scoreExp); } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 7d9f293b0c17b..44c76e74d5a41 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -50,6 +50,7 @@ import org.apache.lucene.search.BulkScorer; import org.apache.lucene.util.Bits; import org.opensearch.Version; +import org.opensearch.common.Nullable; import org.opensearch.script.ScoreScript; import org.opensearch.script.ScoreScript.ExplanationHolder; import 
org.opensearch.script.Script; @@ -69,6 +70,7 @@ public class ScriptScoreQuery extends Query { private final String indexName; private final int shardId; private final Version indexVersion; + private final String queryName; public ScriptScoreQuery( Query subQuery, @@ -78,8 +80,22 @@ public ScriptScoreQuery( String indexName, int shardId, Version indexVersion + ) { + this(subQuery, null, script, scriptBuilder, minScore, indexName, shardId, indexVersion); + } + + public ScriptScoreQuery( + Query subQuery, + @Nullable String queryName, + Script script, + ScoreScript.LeafFactory scriptBuilder, + Float minScore, + String indexName, + int shardId, + Version indexVersion ) { this.subQuery = subQuery; + this.queryName = queryName; this.script = script; this.scriptBuilder = scriptBuilder; this.minScore = minScore; @@ -92,7 +108,7 @@ public ScriptScoreQuery( public Query rewrite(IndexReader reader) throws IOException { Query newQ = subQuery.rewrite(reader); if (newQ != subQuery) { - return new ScriptScoreQuery(newQ, script, scriptBuilder, minScore, indexName, shardId, indexVersion); + return new ScriptScoreQuery(newQ, queryName, script, scriptBuilder, minScore, indexName, shardId, indexVersion); } return super.rewrite(reader); } @@ -140,7 +156,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Explanation subQueryExplanation = subQueryWeight.explain(context, doc); + Explanation subQueryExplanation = Functions.explainWithName(subQueryWeight.explain(context, doc), queryName); if (subQueryExplanation.isMatch() == false) { return subQueryExplanation; } @@ -210,7 +226,8 @@ public void visit(QueryVisitor visitor) { @Override public String toString(String field) { StringBuilder sb = new StringBuilder(); - sb.append("script_score (").append(subQuery.toString(field)).append(", script: "); + sb.append("script_score (").append(subQuery.toString(field)); + 
sb.append(Functions.nameOrEmptyArg(queryName)).append(", script: "); sb.append("{" + script.toString() + "}"); return sb.toString(); } @@ -225,12 +242,13 @@ public boolean equals(Object o) { && script.equals(that.script) && Objects.equals(minScore, that.minScore) && indexName.equals(that.indexName) - && indexVersion.equals(that.indexVersion); + && indexVersion.equals(that.indexVersion) + && Objects.equals(queryName, that.queryName); } @Override public int hashCode() { - return Objects.hash(subQuery, script, minScore, indexName, shardId, indexVersion); + return Objects.hash(subQuery, script, minScore, indexName, shardId, indexVersion, queryName); } private static class ScriptScorer extends Scorer { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java b/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java index 9ef33efdfd9f5..71968a0545cff 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java @@ -34,6 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; +import org.opensearch.common.Strings; import java.io.IOException; import java.util.Objects; @@ -45,9 +47,17 @@ public class WeightFactorFunction extends ScoreFunction { private float weight = 1.0f; public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { + this(weight, scoreFunction, null); + } + + public WeightFactorFunction(float weight, ScoreFunction scoreFunction, @Nullable String functionName) { super(CombineFunction.MULTIPLY); if (scoreFunction == null) { - this.scoreFunction = SCORE_ONE; + if (Strings.isNullOrEmpty(functionName)) { + this.scoreFunction = SCORE_ONE; + } else { + this.scoreFunction = new ScoreOne(CombineFunction.MULTIPLY, functionName); + } } else { 
this.scoreFunction = scoreFunction; } @@ -55,9 +65,11 @@ public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { } public WeightFactorFunction(float weight) { - super(CombineFunction.MULTIPLY); - this.scoreFunction = SCORE_ONE; - this.weight = weight; + this(weight, null, null); + } + + public WeightFactorFunction(float weight, @Nullable String functionName) { + this(weight, null, functionName); } @Override @@ -112,9 +124,15 @@ protected int doHashCode() { } private static class ScoreOne extends ScoreFunction { + private final String functionName; protected ScoreOne(CombineFunction scoreCombiner) { + this(scoreCombiner, null); + } + + protected ScoreOne(CombineFunction scoreCombiner, @Nullable String functionName) { super(scoreCombiner); + this.functionName = functionName; } @Override @@ -127,7 +145,10 @@ public double score(int docId, float subQueryScore) { @Override public Explanation explainScore(int docId, Explanation subQueryScore) { - return Explanation.match(1.0f, "constant score 1.0 - no function provided"); + return Explanation.match( + 1.0f, + "constant score 1.0" + Functions.nameOrEmptyFunc(functionName) + " - no function provided" + ); } }; } diff --git a/server/src/main/java/org/opensearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java b/server/src/main/java/org/opensearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java index a25587c36a3e5..362badf046b75 100644 --- a/server/src/main/java/org/opensearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java +++ b/server/src/main/java/org/opensearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java @@ -117,10 +117,8 @@ final class PerThreadIDVersionAndSeqNoLookup { * entirely for these readers. 
*/ public DocIdAndVersion lookupVersion(BytesRef id, boolean loadSeqNo, LeafReaderContext context) throws IOException { - assert context.reader() - .getCoreCacheHelper() - .getKey() - .equals(readerKey) : "context's reader is not the same as the reader class was initialized on."; + assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) + : "context's reader is not the same as the reader class was initialized on."; int docID = getDocID(id, context); if (docID != DocIdSetIterator.NO_MORE_DOCS) { @@ -174,10 +172,8 @@ private static long readNumericDocValues(LeafReader reader, String field, int do /** Return null if id is not found. */ DocIdAndSeqNo lookupSeqNo(BytesRef id, LeafReaderContext context) throws IOException { - assert context.reader() - .getCoreCacheHelper() - .getKey() - .equals(readerKey) : "context's reader is not the same as the reader class was initialized on."; + assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) + : "context's reader is not the same as the reader class was initialized on."; final int docID = getDocID(id, context); if (docID != DocIdSetIterator.NO_MORE_DOCS) { final long seqNo = readNumericDocValues(context.reader(), SeqNoFieldMapper.NAME, docID); diff --git a/server/src/main/java/org/opensearch/common/settings/Setting.java b/server/src/main/java/org/opensearch/common/settings/Setting.java index 87d26eaca3405..8618687218987 100644 --- a/server/src/main/java/org/opensearch/common/settings/Setting.java +++ b/server/src/main/java/org/opensearch/common/settings/Setting.java @@ -173,9 +173,8 @@ private Setting( Validator validator, Property... 
properties ) { - assert this instanceof SecureSetting - || this.isGroupSetting() - || parser.apply(defaultValue.apply(Settings.EMPTY)) != null : "parser returned null"; + assert this instanceof SecureSetting || this.isGroupSetting() || parser.apply(defaultValue.apply(Settings.EMPTY)) != null + : "parser returned null"; this.key = key; this.fallbackSetting = fallbackSetting; this.defaultValue = defaultValue; diff --git a/server/src/main/java/org/opensearch/common/time/EpochTime.java b/server/src/main/java/org/opensearch/common/time/EpochTime.java index 5c6e024c7475c..7894a653492c8 100644 --- a/server/src/main/java/org/opensearch/common/time/EpochTime.java +++ b/server/src/main/java/org/opensearch/common/time/EpochTime.java @@ -43,8 +43,10 @@ import java.time.temporal.TemporalField; import java.time.temporal.TemporalUnit; import java.time.temporal.ValueRange; +import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Optional; /** * This class provides {@link DateTimeFormatter}s capable of parsing epoch seconds and milliseconds. @@ -52,13 +54,14 @@ * The seconds formatter is provided by {@link #SECONDS_FORMATTER}. * The milliseconds formatter is provided by {@link #MILLIS_FORMATTER}. *

        - * Both formatters support fractional time, up to nanosecond precision. Values must be positive numbers. + * Both formatters support fractional time, up to nanosecond precision. */ class EpochTime { private static final ValueRange LONG_POSITIVE_RANGE = ValueRange.of(0, Long.MAX_VALUE); + private static final ValueRange LONG_RANGE = ValueRange.of(Long.MIN_VALUE, Long.MAX_VALUE); - private static final EpochField SECONDS = new EpochField(ChronoUnit.SECONDS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + private static final EpochField SECONDS = new EpochField(ChronoUnit.SECONDS, ChronoUnit.FOREVER, LONG_RANGE) { @Override public boolean isSupportedBy(TemporalAccessor temporal) { return temporal.isSupported(ChronoField.INSTANT_SECONDS); @@ -97,15 +100,55 @@ public long getFrom(TemporalAccessor temporal) { } }; - private static final EpochField MILLIS = new EpochField(ChronoUnit.MILLIS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + private static final long NEGATIVE = 0; + private static final long POSITIVE = 1; + private static final EpochField SIGN = new EpochField(ChronoUnit.FOREVER, ChronoUnit.FOREVER, ValueRange.of(NEGATIVE, POSITIVE)) { @Override public boolean isSupportedBy(TemporalAccessor temporal) { - return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.MILLI_OF_SECOND); + return temporal.isSupported(ChronoField.INSTANT_SECONDS); + } + + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.INSTANT_SECONDS) < 0 ? NEGATIVE : POSITIVE; + } + }; + + // Millis as absolute values. Negative millis are encoded by having a NEGATIVE SIGN. 
+ private static final EpochField MILLIS_ABS = new EpochField(ChronoUnit.MILLIS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.INSTANT_SECONDS) + && (temporal.isSupported(ChronoField.NANO_OF_SECOND) || temporal.isSupported(ChronoField.MILLI_OF_SECOND)); } @Override public long getFrom(TemporalAccessor temporal) { - return temporal.getLong(ChronoField.INSTANT_SECONDS) * 1_000 + temporal.getLong(ChronoField.MILLI_OF_SECOND); + long instantSecondsInMillis = temporal.getLong(ChronoField.INSTANT_SECONDS) * 1_000; + if (instantSecondsInMillis >= 0) { + if (temporal.isSupported(ChronoField.NANO_OF_SECOND)) { + return instantSecondsInMillis + (temporal.getLong(ChronoField.NANO_OF_SECOND) / 1_000_000); + } else { + return instantSecondsInMillis + temporal.getLong(ChronoField.MILLI_OF_SECOND); + } + } else { // negative timestamp + if (temporal.isSupported(ChronoField.NANO_OF_SECOND)) { + long millis = instantSecondsInMillis; + long nanos = temporal.getLong(ChronoField.NANO_OF_SECOND); + if (nanos % 1_000_000 != 0) { + // Fractional negative timestamp. + // Add 1 ms towards positive infinity because the fraction leads + // the output's integral part to be an off-by-one when the + // `(nanos / 1_000_000)` is added below. 
+ millis += 1; + } + millis += (nanos / 1_000_000); + return -millis; + } else { + long millisOfSecond = temporal.getLong(ChronoField.MILLI_OF_SECOND); + return -(instantSecondsInMillis + millisOfSecond); + } + } } @Override @@ -114,12 +157,37 @@ public TemporalAccessor resolve( TemporalAccessor partialTemporal, ResolverStyle resolverStyle ) { - long secondsAndMillis = fieldValues.remove(this); - long seconds = secondsAndMillis / 1_000; - long nanos = secondsAndMillis % 1000 * 1_000_000; + Long sign = Optional.ofNullable(fieldValues.remove(SIGN)).orElse(POSITIVE); + Long nanosOfMilli = fieldValues.remove(NANOS_OF_MILLI); - if (nanosOfMilli != null) { - nanos += nanosOfMilli; + long secondsAndMillis = fieldValues.remove(this); + + long seconds; + long nanos; + if (sign == NEGATIVE) { + secondsAndMillis = -secondsAndMillis; + seconds = secondsAndMillis / 1_000; + nanos = secondsAndMillis % 1000 * 1_000_000; + // `secondsAndMillis < 0` implies negative timestamp; so `nanos < 0` + if (nanosOfMilli != null) { + // aggregate fractional part of the input; subtract b/c `nanos < 0` + nanos -= nanosOfMilli; + } + if (nanos != 0) { + // nanos must be positive. 
B/c the timestamp is represented by the + // (seconds, nanos) tuple, seconds moves 1s toward negative-infinity + // and nanos moves 1s toward positive-infinity + seconds -= 1; + nanos = 1_000_000_000 + nanos; + } + } else { + seconds = secondsAndMillis / 1_000; + nanos = secondsAndMillis % 1000 * 1_000_000; + + if (nanosOfMilli != null) { + // aggregate fractional part of the input + nanos += nanosOfMilli; + } } fieldValues.put(ChronoField.INSTANT_SECONDS, seconds); fieldValues.put(ChronoField.NANO_OF_SECOND, nanos); @@ -127,6 +195,9 @@ public TemporalAccessor resolve( if (fieldValues.containsKey(ChronoField.MILLI_OF_SECOND)) { fieldValues.put(ChronoField.MILLI_OF_SECOND, nanos / 1_000_000); } + if (fieldValues.containsKey(ChronoField.MICRO_OF_SECOND)) { + fieldValues.put(ChronoField.MICRO_OF_SECOND, nanos / 1000); + } return null; } }; @@ -141,7 +212,11 @@ public boolean isSupportedBy(TemporalAccessor temporal) { @Override public long getFrom(TemporalAccessor temporal) { - return temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000; + if (temporal.getLong(ChronoField.INSTANT_SECONDS) < 0) { + return (1_000_000_000 - temporal.getLong(ChronoField.NANO_OF_SECOND)) % 1_000_000; + } else { + return temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000; + } } }; @@ -157,13 +232,22 @@ public long getFrom(TemporalAccessor temporal) { .appendLiteral('.') .toFormatter(Locale.ROOT); - // this supports milliseconds without any fraction - private static final DateTimeFormatter MILLISECONDS_FORMATTER1 = new DateTimeFormatterBuilder().appendValue( - MILLIS, - 1, - 19, - SignStyle.NORMAL - ).optionalStart().appendFraction(NANOS_OF_MILLI, 0, 6, true).optionalEnd().toFormatter(Locale.ROOT); + private static final Map SIGN_FORMATTER_LOOKUP = new HashMap() { + { + put(POSITIVE, ""); + put(NEGATIVE, "-"); + } + }; + + // this supports milliseconds + private static final DateTimeFormatter MILLISECONDS_FORMATTER1 = new DateTimeFormatterBuilder().optionalStart() + 
.appendText(SIGN, SIGN_FORMATTER_LOOKUP) // field is only created in the presence of a '-' char. + .optionalEnd() + .appendValue(MILLIS_ABS, 1, 19, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANOS_OF_MILLI, 0, 6, true) + .optionalEnd() + .toFormatter(Locale.ROOT); // this supports milliseconds ending in dot private static final DateTimeFormatter MILLISECONDS_FORMATTER2 = new DateTimeFormatterBuilder().append(MILLISECONDS_FORMATTER1) diff --git a/server/src/main/java/org/opensearch/common/util/BigArrays.java b/server/src/main/java/org/opensearch/common/util/BigArrays.java index 287c0999d8998..e877f75bd2a0f 100644 --- a/server/src/main/java/org/opensearch/common/util/BigArrays.java +++ b/server/src/main/java/org/opensearch/common/util/BigArrays.java @@ -455,12 +455,11 @@ public CircuitBreakerService breakerService() { private T resizeInPlace(T array, long newSize) { final long oldMemSize = array.ramBytesUsed(); final long oldSize = array.size(); - assert oldMemSize == array.ramBytesEstimated( - oldSize - ) : "ram bytes used should equal that which was previously estimated: ramBytesUsed=" - + oldMemSize - + ", ramBytesEstimated=" - + array.ramBytesEstimated(oldSize); + assert oldMemSize == array.ramBytesEstimated(oldSize) + : "ram bytes used should equal that which was previously estimated: ramBytesUsed=" + + oldMemSize + + ", ramBytesEstimated=" + + array.ramBytesEstimated(oldSize); final long estimatedIncreaseInBytes = array.ramBytesEstimated(newSize) - oldMemSize; adjustBreaker(estimatedIncreaseInBytes, false); array.resize(newSize); diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/QueueResizingOpenSearchThreadPoolExecutor.java b/server/src/main/java/org/opensearch/common/util/concurrent/QueueResizingOpenSearchThreadPoolExecutor.java index eb0e5bb6ca511..39561039c5c6f 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/QueueResizingOpenSearchThreadPoolExecutor.java +++ 
b/server/src/main/java/org/opensearch/common/util/concurrent/QueueResizingOpenSearchThreadPoolExecutor.java @@ -178,12 +178,11 @@ protected void afterExecute(Runnable r, Throwable t) { final long totalNanos = totalTaskNanos.addAndGet(taskNanos); final long taskExecutionNanos = timedRunnable.getTotalExecutionNanos(); - assert taskExecutionNanos >= 0 - || (failedOrRejected - && taskExecutionNanos == -1) : "expected task to always take longer than 0 nanoseconds or have '-1' failure code, got: " - + taskExecutionNanos - + ", failedOrRejected: " - + failedOrRejected; + assert taskExecutionNanos >= 0 || (failedOrRejected && taskExecutionNanos == -1) + : "expected task to always take longer than 0 nanoseconds or have '-1' failure code, got: " + + taskExecutionNanos + + ", failedOrRejected: " + + failedOrRejected; if (taskExecutionNanos != -1) { // taskExecutionNanos may be -1 if the task threw an exception executionEWMA.addValue(taskExecutionNanos); diff --git a/server/src/main/java/org/opensearch/env/NodeEnvironment.java b/server/src/main/java/org/opensearch/env/NodeEnvironment.java index db62f8f8901f4..06109e7fcdc5b 100644 --- a/server/src/main/java/org/opensearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/opensearch/env/NodeEnvironment.java @@ -563,30 +563,25 @@ public void deleteShardDirectoryUnderLock(ShardLock lock, IndexSettings indexSet } private static boolean assertPathsDoNotExist(final Path[] paths) { - Set existingPaths = Stream.of(paths) - .filter(FileSystemUtils::exists) - .filter( - leftOver -> { - // Relaxed assertion for the special case where only the empty state directory exists after deleting - // the shard directory because it was created again as a result of a metadata read action concurrently. 
- try (DirectoryStream children = Files.newDirectoryStream(leftOver)) { - Iterator iter = children.iterator(); - if (iter.hasNext() == false) { - return true; - } - Path maybeState = iter.next(); - if (iter.hasNext() || maybeState.equals(leftOver.resolve(MetadataStateFormat.STATE_DIR_NAME)) == false) { - return true; - } - try (DirectoryStream stateChildren = Files.newDirectoryStream(maybeState)) { - return stateChildren.iterator().hasNext(); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } + Set existingPaths = Stream.of(paths).filter(FileSystemUtils::exists).filter(leftOver -> { + // Relaxed assertion for the special case where only the empty state directory exists after deleting + // the shard directory because it was created again as a result of a metadata read action concurrently. + try (DirectoryStream children = Files.newDirectoryStream(leftOver)) { + Iterator iter = children.iterator(); + if (iter.hasNext() == false) { + return true; } - ) - .collect(Collectors.toSet()); + Path maybeState = iter.next(); + if (iter.hasNext() || maybeState.equals(leftOver.resolve(MetadataStateFormat.STATE_DIR_NAME)) == false) { + return true; + } + try (DirectoryStream stateChildren = Files.newDirectoryStream(maybeState)) { + return stateChildren.iterator().hasNext(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }).collect(Collectors.toSet()); assert existingPaths.size() == 0 : "Paths exist that should have been deleted: " + existingPaths; return existingPaths.size() == 0; } diff --git a/server/src/main/java/org/opensearch/env/NodeMetadata.java b/server/src/main/java/org/opensearch/env/NodeMetadata.java index bce0209732356..cb6257002b62b 100644 --- a/server/src/main/java/org/opensearch/env/NodeMetadata.java +++ b/server/src/main/java/org/opensearch/env/NodeMetadata.java @@ -125,8 +125,8 @@ public void setNodeVersionId(int nodeVersionId) { public NodeMetadata build() { final Version nodeVersion; if (this.nodeVersion == 
null) { - assert Version.CURRENT.major <= LegacyESVersion.V_7_0_0.major - + 1 : "version is required in the node metadata from v9 onwards"; + assert Version.CURRENT.major <= LegacyESVersion.V_7_0_0.major + 1 + : "version is required in the node metadata from v9 onwards"; nodeVersion = Version.V_EMPTY; } else { nodeVersion = this.nodeVersion; diff --git a/server/src/main/java/org/opensearch/gateway/GatewayMetaState.java b/server/src/main/java/org/opensearch/gateway/GatewayMetaState.java index ce4f749be0fe5..fd978a9c8ed8b 100644 --- a/server/src/main/java/org/opensearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/opensearch/gateway/GatewayMetaState.java @@ -134,8 +134,8 @@ public void start( long currentTerm = onDiskState.currentTerm; if (onDiskState.empty()) { - assert Version.CURRENT.major <= LegacyESVersion.V_7_0_0.major - + 1 : "legacy metadata loader is not needed anymore from v9 onwards"; + assert Version.CURRENT.major <= LegacyESVersion.V_7_0_0.major + 1 + : "legacy metadata loader is not needed anymore from v9 onwards"; final Tuple legacyState = metaStateService.loadFullState(); if (legacyState.v1().isEmpty() == false) { metadata = legacyState.v2(); diff --git a/server/src/main/java/org/opensearch/gateway/PrimaryShardAllocator.java b/server/src/main/java/org/opensearch/gateway/PrimaryShardAllocator.java index 1e6c6d15dd91c..dbde8abf02d6d 100644 --- a/server/src/main/java/org/opensearch/gateway/PrimaryShardAllocator.java +++ b/server/src/main/java/org/opensearch/gateway/PrimaryShardAllocator.java @@ -368,11 +368,10 @@ protected static NodeShardsResult buildNodeShardsResult( } if (allocationId != null) { - assert nodeShardState.storeException() == null - || nodeShardState - .storeException() instanceof ShardLockObtainFailedException : "only allow store that can be opened or that throws a ShardLockObtainFailedException while being opened but got a " - + "store throwing " - + nodeShardState.storeException(); + assert 
nodeShardState.storeException() == null || nodeShardState.storeException() instanceof ShardLockObtainFailedException + : "only allow store that can be opened or that throws a ShardLockObtainFailedException while being opened but got a " + + "store throwing " + + nodeShardState.storeException(); numberOfAllocationsFound++; if (matchAnyShard || inSyncAllocationIds.contains(nodeShardState.allocationId())) { nodeShardStates.add(nodeShardState); diff --git a/server/src/main/java/org/opensearch/http/DefaultRestChannel.java b/server/src/main/java/org/opensearch/http/DefaultRestChannel.java index 70c386b16ee03..d94eadf82463a 100644 --- a/server/src/main/java/org/opensearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/opensearch/http/DefaultRestChannel.java @@ -128,8 +128,8 @@ public void sendResponse(RestResponse restResponse) { finalContent = BytesArray.EMPTY; } } catch (IllegalArgumentException ignored) { - assert restResponse - .status() == RestStatus.METHOD_NOT_ALLOWED : "request HTTP method is unsupported but HTTP status is not METHOD_NOT_ALLOWED(405)"; + assert restResponse.status() == RestStatus.METHOD_NOT_ALLOWED + : "request HTTP method is unsupported but HTTP status is not METHOD_NOT_ALLOWED(405)"; } final HttpResponse httpResponse = httpRequest.createResponse(restResponse.status(), finalContent); diff --git a/server/src/main/java/org/opensearch/index/IndexService.java b/server/src/main/java/org/opensearch/index/IndexService.java index a14afde7ebafb..1b301e85365ba 100644 --- a/server/src/main/java/org/opensearch/index/IndexService.java +++ b/server/src/main/java/org/opensearch/index/IndexService.java @@ -624,13 +624,28 @@ public IndexSettings getIndexSettings() { } /** - * Creates a new QueryShardContext. The context has not types set yet, if types are required set them via - * {@link QueryShardContext#setTypes(String...)}. + * Creates a new QueryShardContext. 
* * Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. */ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias) { + return newQueryShardContext(shardId, searcher, nowInMillis, clusterAlias, false); + } + + /** + * Creates a new QueryShardContext. + * + * Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make + * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. + */ + public QueryShardContext newQueryShardContext( + int shardId, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + boolean validate + ) { final SearchIndexNameMatcher indexNameMatcher = new SearchIndexNameMatcher( index().getName(), clusterAlias, @@ -654,7 +669,8 @@ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searche clusterAlias, indexNameMatcher, allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } diff --git a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java index fca91983b2d12..b77e7639152fb 100644 --- a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java @@ -226,7 +226,6 @@ private static Map prepareMap( map.put("message", index); map.put("took", TimeValue.timeValueNanos(tookInNanos)); map.put("took_millis", "" + TimeUnit.NANOSECONDS.toMillis(tookInNanos)); - map.put("doc_type", doc.type()); map.put("id", doc.id()); map.put("routing", doc.routing()); @@ -258,7 +257,6 @@ private static String message(Index index, ParsedDocument doc, long tookInNanos, sb.append(index).append(" "); 
sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], "); sb.append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); - sb.append("type[").append(doc.type()).append("], "); sb.append("id[").append(doc.id()).append("], "); if (doc.routing() == null) { sb.append("routing[]"); diff --git a/server/src/main/java/org/opensearch/index/SearchSlowLog.java b/server/src/main/java/org/opensearch/index/SearchSlowLog.java index b463f46e476bf..37413388215c8 100644 --- a/server/src/main/java/org/opensearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/opensearch/index/SearchSlowLog.java @@ -47,7 +47,6 @@ import org.opensearch.tasks.Task; import java.nio.charset.Charset; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -220,8 +219,6 @@ private static Map prepareMap(SearchContext context, long tookIn } else { messageFields.put("total_hits", "-1"); } - String[] types = context.getQueryShardContext().getTypes(); - messageFields.put("types", escapeJson(asJsonArray(types != null ? Arrays.stream(types) : Stream.empty()))); messageFields.put( "stats", escapeJson(asJsonArray(context.groupStats() != null ? 
context.groupStats().stream() : Stream.empty())) @@ -259,13 +256,6 @@ private static String message(SearchContext context, long tookInNanos) { sb.append("-1"); } sb.append("], "); - if (context.getQueryShardContext().getTypes() == null) { - sb.append("types[], "); - } else { - sb.append("types["); - Strings.arrayToDelimitedString(context.getQueryShardContext().getTypes(), ",", sb); - sb.append("], "); - } if (context.groupStats() == null) { sb.append("stats[], "); } else { diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java new file mode 100644 index 0000000000000..313c0d359bb02 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec; + +import org.apache.logging.log4j.Logger; +import org.opensearch.common.Nullable; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; + +import java.util.Objects; + +/** + * The configuration parameters necessary for the {@link CodecService} instance construction. 
+ */ +public final class CodecServiceConfig { + private final IndexSettings indexSettings; + private final MapperService mapperService; + private final Logger logger; + + public CodecServiceConfig(IndexSettings indexSettings, @Nullable MapperService mapperService, @Nullable Logger logger) { + this.indexSettings = Objects.requireNonNull(indexSettings); + this.mapperService = mapperService; + this.logger = logger; + } + + public IndexSettings getIndexSettings() { + return indexSettings; + } + + @Nullable + public MapperService getMapperService() { + return mapperService; + } + + @Nullable + public Logger getLogger() { + return logger; + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java new file mode 100644 index 0000000000000..da28c5f06b035 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java @@ -0,0 +1,22 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec; + +/** + * A factory for creating new {@link CodecService} instance + */ +@FunctionalInterface +public interface CodecServiceFactory { + /** + * Create new {@link CodecService} instance + * @param config code service configuration + * @return new {@link CodecService} instance + */ + CodecService createCodecService(CodecServiceConfig config); +} diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index 73f217ee19501..06f2216a28812 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -57,11 +57,8 @@ public class PerFieldMappingPostingFormatCodec extends Lucene87Codec { private final DocValuesFormat dvFormat = new Lucene80DocValuesFormat(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION); static { - assert Codec.forName(Lucene.LATEST_CODEC) - .getClass() - .isAssignableFrom(PerFieldMappingPostingFormatCodec.class) : "PerFieldMappingPostingFormatCodec must subclass the latest " - + "lucene codec: " - + Lucene.LATEST_CODEC; + assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) + : "PerFieldMappingPostingFormatCodec must subclass the latest " + "lucene codec: " + Lucene.LATEST_CODEC; } public PerFieldMappingPostingFormatCodec(Mode compressionMode, MapperService mapperService, Logger logger) { diff --git a/server/src/main/java/org/opensearch/index/engine/DocumentMissingException.java b/server/src/main/java/org/opensearch/index/engine/DocumentMissingException.java index 95ae422e73f64..9eb54292e13bd 100644 --- a/server/src/main/java/org/opensearch/index/engine/DocumentMissingException.java +++ b/server/src/main/java/org/opensearch/index/engine/DocumentMissingException.java @@ -39,8 +39,8 @@ public 
class DocumentMissingException extends EngineException { - public DocumentMissingException(ShardId shardId, String type, String id) { - super(shardId, "[" + type + "][" + id + "]: document missing"); + public DocumentMissingException(ShardId shardId, String id) { + super(shardId, "[" + id + "]: document missing"); } public DocumentMissingException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/DocumentSourceMissingException.java b/server/src/main/java/org/opensearch/index/engine/DocumentSourceMissingException.java index bfd595c9be5eb..333abc3794a5c 100644 --- a/server/src/main/java/org/opensearch/index/engine/DocumentSourceMissingException.java +++ b/server/src/main/java/org/opensearch/index/engine/DocumentSourceMissingException.java @@ -39,8 +39,8 @@ public class DocumentSourceMissingException extends EngineException { - public DocumentSourceMissingException(ShardId shardId, String type, String id) { - super(shardId, "[" + type + "][" + id + "]: document source missing"); + public DocumentSourceMissingException(ShardId shardId, String id) { + super(shardId, "[" + id + "]: document source missing"); } public DocumentSourceMissingException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 1f330990348dc..825d71d6d1024 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -55,10 +55,10 @@ import org.apache.lucene.util.SetOnce; import org.opensearch.ExceptionsHelper; import org.opensearch.action.index.IndexRequest; -import org.opensearch.common.CheckedRunnable; import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.ImmutableOpenMap; +import org.opensearch.common.concurrent.GatedCloseable; import 
org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -72,7 +72,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.index.VersionType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.ParseContext.Document; import org.opensearch.index.mapper.ParsedDocument; @@ -730,7 +729,7 @@ public enum SearcherScope { /** * Acquires a lock on the translog files and Lucene soft-deleted documents to prevent them from being trimmed */ - public abstract Closeable acquireHistoryRetentionLock(HistorySource historySource); + public abstract Closeable acquireHistoryRetentionLock(); /** * Creates a new history snapshot from Lucene for reading operations whose seqno in the requesting seqno range (both inclusive). @@ -738,57 +737,22 @@ public enum SearcherScope { */ public abstract Translog.Snapshot newChangesSnapshot( String source, - MapperService mapperService, long fromSeqNo, long toSeqNo, - boolean requiredFullRange + boolean requiredFullRange, + boolean accurateCount ) throws IOException; /** - * Creates a new history snapshot from either Lucene/Translog for reading operations whose seqno in the requesting - * seqno range (both inclusive). 
+ * Counts the number of history operations in the given sequence number range + * @param source source of the request + * @param fromSeqNo from sequence number; included + * @param toSeqNumber to sequence number; included + * @return number of history operations */ - public Translog.Snapshot newChangesSnapshot( - String source, - HistorySource historySource, - MapperService mapperService, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange - ) throws IOException { - return newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); - } - - /** - * Creates a new history snapshot for reading operations since {@code startingSeqNo} (inclusive). - * The returned snapshot can be retrieved from either Lucene index or translog files. - */ - public abstract Translog.Snapshot readHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) throws IOException; - - /** - * Returns the estimated number of history operations whose seq# at least {@code startingSeqNo}(inclusive) in this engine. - */ - public abstract int estimateNumberOfHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) throws IOException; + public abstract int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNumber) throws IOException; - /** - * Checks if this engine has every operations since {@code startingSeqNo}(inclusive) in its history (either Lucene or translog) - */ - public abstract boolean hasCompleteOperationHistory( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) throws IOException; + public abstract boolean hasCompleteOperationHistory(String reason, long startingSeqNo); /** * Gets the minimum retained sequence number for this engine. 
@@ -1152,12 +1116,12 @@ public abstract void forceMerge( * * @param flushFirst indicates whether the engine should flush before returning the snapshot */ - public abstract IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException; + public abstract GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException; /** * Snapshots the most recent safe index commit from the engine. */ - public abstract IndexCommitRef acquireSafeIndexCommit() throws EngineException; + public abstract GatedCloseable acquireSafeIndexCommit() throws EngineException; /** * @return a summary of the contents of the current safe commit @@ -1433,8 +1397,6 @@ public long startTime() { return this.startTime; } - public abstract String type(); - abstract String id(); public abstract TYPE operationType(); @@ -1466,11 +1428,8 @@ public Index( assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; assert ifPrimaryTerm >= 0 : "ifPrimaryTerm [" + ifPrimaryTerm + "] must be non negative"; assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset"; - assert (origin == Origin.PRIMARY) - || (ifSeqNo == UNASSIGNED_SEQ_NO - && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM) : "cas operations are only allowed if origin is primary. get [" - + origin - + "]"; + assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM) + : "cas operations are only allowed if origin is primary. 
get [" + origin + "]"; this.doc = doc; this.isRetry = isRetry; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; @@ -1503,11 +1462,6 @@ public ParsedDocument parsedDoc() { return this.doc; } - @Override - public String type() { - return this.doc.type(); - } - @Override public String id() { return this.doc.id(); @@ -1532,7 +1486,7 @@ public BytesReference source() { @Override public int estimatedSizeInBytes() { - return (id().length() + type().length()) * 2 + source().length() + 12; + return id().length() * 2 + source().length() + 12; } /** @@ -1563,13 +1517,11 @@ public long getIfPrimaryTerm() { public static class Delete extends Operation { - private final String type; private final String id; private final long ifSeqNo; private final long ifPrimaryTerm; public Delete( - String type, String id, Term uid, long seqNo, @@ -1585,20 +1537,15 @@ public Delete( assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; assert ifPrimaryTerm >= 0 : "ifPrimaryTerm [" + ifPrimaryTerm + "] must be non negative"; assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset"; - assert (origin == Origin.PRIMARY) - || (ifSeqNo == UNASSIGNED_SEQ_NO - && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM) : "cas operations are only allowed if origin is primary. get [" - + origin - + "]"; - this.type = Objects.requireNonNull(type); + assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM) + : "cas operations are only allowed if origin is primary. 
get [" + origin + "]"; this.id = Objects.requireNonNull(id); this.ifSeqNo = ifSeqNo; this.ifPrimaryTerm = ifPrimaryTerm; } - public Delete(String type, String id, Term uid, long primaryTerm) { + public Delete(String id, Term uid, long primaryTerm) { this( - type, id, uid, UNASSIGNED_SEQ_NO, @@ -1614,7 +1561,6 @@ public Delete(String type, String id, Term uid, long primaryTerm) { public Delete(Delete template, VersionType versionType) { this( - template.type(), template.id(), template.uid(), template.seqNo(), @@ -1628,11 +1574,6 @@ public Delete(Delete template, VersionType versionType) { ); } - @Override - public String type() { - return this.type; - } - @Override public String id() { return this.id; @@ -1675,11 +1616,6 @@ public Term uid() { throw new UnsupportedOperationException(); } - @Override - public String type() { - throw new UnsupportedOperationException(); - } - @Override public long version() { throw new UnsupportedOperationException(); @@ -1710,16 +1646,15 @@ public int estimatedSizeInBytes() { public static class Get { private final boolean realtime; private final Term uid; - private final String type, id; + private final String id; private final boolean readFromTranslog; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; private long ifSeqNo = UNASSIGNED_SEQ_NO; private long ifPrimaryTerm = UNASSIGNED_PRIMARY_TERM; - public Get(boolean realtime, boolean readFromTranslog, String type, String id, Term uid) { + public Get(boolean realtime, boolean readFromTranslog, String id, Term uid) { this.realtime = realtime; - this.type = type; this.id = id; this.uid = uid; this.readFromTranslog = readFromTranslog; @@ -1729,10 +1664,6 @@ public boolean realtime() { return this.realtime; } - public String type() { - return type; - } - public String id() { return id; } @@ -1883,28 +1814,6 @@ private void awaitPendingClose() { } } - public static class IndexCommitRef implements Closeable { - private final AtomicBoolean 
closed = new AtomicBoolean(); - private final CheckedRunnable onClose; - private final IndexCommit indexCommit; - - public IndexCommitRef(IndexCommit indexCommit, CheckedRunnable onClose) { - this.indexCommit = indexCommit; - this.onClose = onClose; - } - - @Override - public void close() throws IOException { - if (closed.compareAndSet(false, true)) { - onClose.run(); - } - } - - public IndexCommit getIndexCommit() { - return indexCommit; - } - } - public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, long softDeletesRetentionOps) { } @@ -2040,12 +1949,4 @@ public interface TranslogRecoveryRunner { * to advance this marker to at least the given sequence number. */ public abstract void advanceMaxSeqNoOfUpdatesOrDeletes(long maxSeqNoOfUpdatesOnPrimary); - - /** - * Whether we should read history operations from translog or Lucene index - */ - public enum HistorySource { - TRANSLOG, - INDEX - } } diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java index fd02f3049cc8e..d1085b01a3707 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java @@ -466,7 +466,7 @@ public interface TombstoneDocSupplier { /** * Creates a tombstone document for a delete operation. */ - ParsedDocument newDeleteTombstoneDoc(String type, String id); + ParsedDocument newDeleteTombstoneDoc(String id); /** * Creates a tombstone document for a noop operation. 
diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java index dffdb58bfec1e..a78a5e5a4820a 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java @@ -8,6 +8,7 @@ package org.opensearch.index.engine; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.search.QueryCache; @@ -15,9 +16,13 @@ import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; import org.apache.lucene.search.similarities.Similarity; +import org.opensearch.common.Nullable; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceConfig; +import org.opensearch.index.codec.CodecServiceFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.shard.ShardId; import org.opensearch.index.store.Store; @@ -39,7 +44,7 @@ * A factory to create an EngineConfig based on custom plugin overrides */ public class EngineConfigFactory { - private final CodecService codecService; + private final CodecServiceFactory codecServiceFactory; private final TranslogDeletionPolicyFactory translogDeletionPolicyFactory; /** default ctor primarily used for tests without plugins */ @@ -58,6 +63,8 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti EngineConfigFactory(Collection enginePlugins, IndexSettings idxSettings) { Optional codecService = Optional.empty(); String codecServiceOverridingPlugin = null; + Optional codecServiceFactory = Optional.empty(); + String codecServiceFactoryOverridingPlugin = null; Optional translogDeletionPolicyFactory = 
Optional.empty(); String translogDeletionPolicyOverridingPlugin = null; for (EnginePlugin enginePlugin : enginePlugins) { @@ -65,7 +72,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (codecService.isPresent() == false) { codecService = enginePlugin.getCustomCodecService(idxSettings); codecServiceOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomCodecService(idxSettings).isPresent()) { throw new IllegalStateException( "existing codec service already overridden in: " + codecServiceOverridingPlugin @@ -76,7 +83,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (translogDeletionPolicyFactory.isPresent() == false) { translogDeletionPolicyFactory = enginePlugin.getCustomTranslogDeletionPolicyFactory(); translogDeletionPolicyOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomTranslogDeletionPolicyFactory().isPresent()) { throw new IllegalStateException( "existing TranslogDeletionPolicyFactory is already overridden in: " + translogDeletionPolicyOverridingPlugin @@ -84,12 +91,37 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti + enginePlugin.getClass().getName() ); } + // get overriding CodecServiceFactory from EnginePlugin + if (codecServiceFactory.isPresent() == false) { + codecServiceFactory = enginePlugin.getCustomCodecServiceFactory(idxSettings); + codecServiceFactoryOverridingPlugin = enginePlugin.getClass().getName(); + } else if (enginePlugin.getCustomCodecServiceFactory(idxSettings).isPresent()) { + throw new IllegalStateException( + "existing codec service factory already overridden in: " + + codecServiceFactoryOverridingPlugin + + " attempting to override again by: " + + enginePlugin.getClass().getName() + ); + } + } + + if (codecService.isPresent() && codecServiceFactory.isPresent()) { + throw new IllegalStateException( + "both codec service and codec 
service factory are present, codec service provided by: " + + codecServiceOverridingPlugin + + " conflicts with codec service factory provided by: " + + codecServiceFactoryOverridingPlugin + ); } - this.codecService = codecService.orElse(null); + + final CodecService instance = codecService.orElse(null); + this.codecServiceFactory = (instance != null) ? (config) -> instance : codecServiceFactory.orElse(null); this.translogDeletionPolicyFactory = translogDeletionPolicyFactory.orElse((idxs, rtls) -> null); } - /** Instantiates a new EngineConfig from the provided custom overrides */ + /** + * Instantiates a new EngineConfig from the provided custom overrides + */ public EngineConfig newEngineConfig( ShardId shardId, ThreadPool threadPool, @@ -114,6 +146,10 @@ public EngineConfig newEngineConfig( LongSupplier primaryTermSupplier, EngineConfig.TombstoneDocSupplier tombstoneDocSupplier ) { + CodecService codecServiceToUse = codecService; + if (codecService == null && this.codecServiceFactory != null) { + codecServiceToUse = newCodecServiceOrDefault(indexSettings, null, null, null); + } return new EngineConfig( shardId, @@ -124,7 +160,7 @@ public EngineConfig newEngineConfig( mergePolicy, analyzer, similarity, - this.codecService != null ? this.codecService : codecService, + codecServiceToUse, eventListener, queryCache, queryCachingPolicy, @@ -141,4 +177,15 @@ public EngineConfig newEngineConfig( tombstoneDocSupplier ); } + + public CodecService newCodecServiceOrDefault( + IndexSettings indexSettings, + @Nullable MapperService mapperService, + Logger logger, + CodecService defaultCodecService + ) { + return this.codecServiceFactory != null + ? 
this.codecServiceFactory.createCodecService(new CodecServiceConfig(indexSettings, mapperService, logger)) + : defaultCodecService; + } } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index ae508d627a00a..1c5f06e85cb88 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -72,6 +72,7 @@ import org.opensearch.common.Booleans; import org.opensearch.common.Nullable; import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lucene.LoggerInfoStream; import org.opensearch.common.lucene.Lucene; @@ -90,7 +91,6 @@ import org.opensearch.index.VersionType; import org.opensearch.index.fieldvisitor.IdOnlyFieldVisitor; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParsedDocument; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -103,11 +103,10 @@ import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.OpenSearchMergePolicy; import org.opensearch.index.shard.ShardId; -import org.opensearch.index.store.Store; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; import org.opensearch.index.translog.TranslogCorruptedException; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -115,7 +114,6 @@ import java.io.Closeable; import java.io.IOException; -import java.nio.file.Path; 
import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -252,7 +250,7 @@ public InternalEngine(EngineConfig engineConfig) { mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); throttle = new IndexThrottle(); try { - trimUnsafeCommits(engineConfig); + store.trimUnsafeCommits(engineConfig.getTranslogConfig().getTranslogPath()); translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> { final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); assert tracker != null || getTranslog().isOpen() == false; @@ -480,8 +478,8 @@ public int fillSeqNoGaps(long primaryTerm) throws IOException { } syncTranslog(); // to persist noops associated with the advancement of the local checkpoint - assert localCheckpointTracker - .getPersistedCheckpoint() == maxSeqNo : "persisted local checkpoint did not advance to max seq no; is [" + assert localCheckpointTracker.getPersistedCheckpoint() == maxSeqNo + : "persisted local checkpoint did not advance to max seq no; is [" + localCheckpointTracker.getPersistedCheckpoint() + "], max seq no [" + maxSeqNo @@ -608,45 +606,6 @@ public void syncTranslog() throws IOException { revisitIndexDeletionPolicyOnTranslogSynced(); } - /** - * Creates a new history snapshot for reading operations since the provided seqno. - * The returned snapshot can be retrieved from either Lucene index or translog files. 
- */ - @Override - public Translog.Snapshot readHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) throws IOException { - if (historySource == HistorySource.INDEX) { - return newChangesSnapshot(reason, mapperService, Math.max(0, startingSeqNo), Long.MAX_VALUE, false); - } else { - return getTranslog().newSnapshot(startingSeqNo, Long.MAX_VALUE); - } - } - - /** - * Returns the estimated number of history operations whose seq# at least the provided seq# in this engine. - */ - @Override - public int estimateNumberOfHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) throws IOException { - if (historySource == HistorySource.INDEX) { - try ( - Translog.Snapshot snapshot = newChangesSnapshot(reason, mapperService, Math.max(0, startingSeqNo), Long.MAX_VALUE, false) - ) { - return snapshot.totalOperations(); - } - } else { - return getTranslog().estimateTotalOperationsFromMinSeq(startingSeqNo); - } - } - @Override public TranslogStats getTranslogStats() { return getTranslog().stats(); @@ -1348,10 +1307,10 @@ private IndexingStrategy( int reservedDocs, IndexResult earlyResultOnPreFlightError ) { - assert useLuceneUpdateDocument == false - || indexIntoLucene : "use lucene update is set to true, but we're not indexing into lucene"; - assert (indexIntoLucene - && earlyResultOnPreFlightError != null) == false : "can only index into lucene or have a preflight result but not both." + assert useLuceneUpdateDocument == false || indexIntoLucene + : "use lucene update is set to true, but we're not indexing into lucene"; + assert (indexIntoLucene && earlyResultOnPreFlightError != null) == false + : "can only index into lucene or have a preflight result but not both." 
+ "indexIntoLucene: " + indexIntoLucene + " earlyResultOnPreFlightError:" @@ -1416,15 +1375,13 @@ private boolean assertDocDoesNotExist(final Index index, final boolean allowDele final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes()); if (versionValue != null) { if (versionValue.isDelete() == false || allowDeleted == false) { - throw new AssertionError( - "doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")" - ); + throw new AssertionError("doc [" + index.id() + "] exists in version map (version " + versionValue + ")"); } } else { try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) { final long docsWithId = searcher.count(new TermQuery(index.uid())); if (docsWithId > 0) { - throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index"); + throw new AssertionError("doc [" + index.id() + "] exists [" + docsWithId + "] times in index"); } } } @@ -1460,7 +1417,6 @@ public DeleteResult delete(Delete delete) throws IOException { // generate or register sequence number if (delete.origin() == Operation.Origin.PRIMARY) { delete = new Delete( - delete.type(), delete.id(), delete.uid(), generateSeqNoForOperationOnPrimary(delete), @@ -1648,7 +1604,7 @@ private DeletionStrategy planDeletionAsPrimary(Delete delete) throws IOException private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException { assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), delete.seqNo(), false, false); try { - final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id()); + final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.id()); assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]"; tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm()); 
tombstone.version().setLongValue(plan.versionOfDeletion); @@ -1699,8 +1655,8 @@ private DeletionStrategy( int reservedDocs, DeleteResult earlyResultOnPreflightError ) { - assert (deleteFromLucene - && earlyResultOnPreflightError != null) == false : "can only delete from lucene or have a preflight result but not both." + assert (deleteFromLucene && earlyResultOnPreflightError != null) == false + : "can only delete from lucene or have a preflight result but not both." + "deleteFromLucene: " + deleteFromLucene + " earlyResultOnPreFlightError:" @@ -1808,9 +1764,8 @@ private NoOpResult innerNoOp(final NoOp noOp) throws IOException { tombstone.version().setLongValue(1L); assert tombstone.docs().size() == 1 : "Tombstone should have a single doc [" + tombstone + "]"; final ParseContext.Document doc = tombstone.docs().get(0); - assert doc.getField( - SeqNoFieldMapper.TOMBSTONE_NAME - ) != null : "Noop tombstone document but _tombstone field is not set [" + doc + " ]"; + assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null + : "Noop tombstone document but _tombstone field is not set [" + doc + " ]"; doc.add(softDeletesField); indexWriter.addDocument(doc); } catch (final Exception ex) { @@ -2235,7 +2190,7 @@ public void forceMerge( } @Override - public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(final boolean flushFirst) throws EngineException { // we have to flush outside of the readlock otherwise we might have a problem upgrading // the to a write lock when we fail the engine in this operation if (flushFirst) { @@ -2244,13 +2199,13 @@ public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws En logger.trace("finish flush for snapshot"); } final IndexCommit lastCommit = combinedDeletionPolicy.acquireIndexCommit(false); - return new Engine.IndexCommitRef(lastCommit, () -> releaseIndexCommit(lastCommit)); + return new GatedCloseable<>(lastCommit, () -> 
releaseIndexCommit(lastCommit)); } @Override - public IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexCommit safeCommit = combinedDeletionPolicy.acquireIndexCommit(true); - return new Engine.IndexCommitRef(safeCommit, () -> releaseIndexCommit(safeCommit)); + return new GatedCloseable<>(safeCommit, () -> releaseIndexCommit(safeCommit)); } private void releaseIndexCommit(IndexCommit snapshot) throws IOException { @@ -2367,9 +2322,8 @@ public List segments(boolean verbose) { @Override protected final void closeNoLock(String reason, CountDownLatch closedLatch) { if (isClosed.compareAndSet(false, true)) { - assert rwl.isWriteLockedByCurrentThread() - || failEngineLock - .isHeldByCurrentThread() : "Either the write lock must be held or the engine must be currently be failing itself"; + assert rwl.isWriteLockedByCurrentThread() || failEngineLock.isHeldByCurrentThread() + : "Either the write lock must be held or the engine must be currently be failing itself"; try { this.versionMap.clear(); if (internalReaderManager != null) { @@ -2820,26 +2774,10 @@ long getNumDocUpdates() { @Override public Translog.Snapshot newChangesSnapshot( String source, - HistorySource historySource, - MapperService mapperService, long fromSeqNo, long toSeqNo, - boolean requiredFullRange - ) throws IOException { - if (historySource == HistorySource.INDEX) { - return newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); - } else { - return getTranslog().newSnapshot(fromSeqNo, toSeqNo, requiredFullRange); - } - } - - @Override - public Translog.Snapshot newChangesSnapshot( - String source, - MapperService mapperService, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange + boolean requiredFullRange, + boolean accurateCount ) throws IOException { ensureOpen(); refreshIfNeeded(source, toSeqNo); @@ -2847,11 +2785,11 @@ public Translog.Snapshot newChangesSnapshot( 
try { LuceneChangesSnapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE, fromSeqNo, toSeqNo, - requiredFullRange + requiredFullRange, + accurateCount ); searcher = null; return snapshot; @@ -2867,30 +2805,25 @@ public Translog.Snapshot newChangesSnapshot( } } - @Override - public boolean hasCompleteOperationHistory(String reason, HistorySource historySource, MapperService mapperService, long startingSeqNo) - throws IOException { - if (historySource == HistorySource.INDEX) { - return getMinRetainedSeqNo() <= startingSeqNo; - } else { - final long currentLocalCheckpoint = localCheckpointTracker.getProcessedCheckpoint(); - // avoid scanning translog if not necessary - if (startingSeqNo > currentLocalCheckpoint) { - return true; - } - final LocalCheckpointTracker tracker = new LocalCheckpointTracker(startingSeqNo, startingSeqNo - 1); - try (Translog.Snapshot snapshot = getTranslog().newSnapshot(startingSeqNo, Long.MAX_VALUE)) { - Translog.Operation operation; - while ((operation = snapshot.next()) != null) { - if (operation.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { - tracker.markSeqNoAsProcessed(operation.seqNo()); - } - } + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + ensureOpen(); + refreshIfNeeded(source, toSeqNo); + try (Searcher s = acquireSearcher(source, SearcherScope.INTERNAL)) { + return LuceneChangesSnapshot.countNumberOfHistoryOperations(s, fromSeqNo, toSeqNo); + } catch (IOException e) { + try { + maybeFailEngine(source, e); + } catch (Exception innerException) { + e.addSuppressed(innerException); } - return tracker.getProcessedCheckpoint() >= currentLocalCheckpoint; + throw e; } } + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { + return getMinRetainedSeqNo() <= startingSeqNo; + } + /** * Returns the minimum seqno that is retained in the Lucene index. 
* Operations whose seq# are at least this value should exist in the Lucene index. @@ -2899,13 +2832,8 @@ public final long getMinRetainedSeqNo() { return softDeletesPolicy.getMinRetainedSeqNo(); } - @Override - public Closeable acquireHistoryRetentionLock(HistorySource historySource) { - if (historySource == HistorySource.INDEX) { - return softDeletesPolicy.acquireRetentionLock(); - } else { - return translog.acquireRetentionLock(); - } + public Closeable acquireHistoryRetentionLock() { + return softDeletesPolicy.acquireRetentionLock(); } /** @@ -3037,15 +2965,6 @@ private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean a return true; } - private static void trimUnsafeCommits(EngineConfig engineConfig) throws IOException { - final Store store = engineConfig.getStore(); - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - final Path translogPath = engineConfig.getTranslogConfig().getTranslogPath(); - final long globalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); - final long minRetainedTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUUID); - store.trimUnsafeCommits(globalCheckpoint, minRetainedTranslogGen, engineConfig.getIndexSettings().getIndexVersionCreated()); - } - /** * Restores the live version map and local checkpoint of this engine using documents (including soft-deleted) * after the local checkpoint in the safe commit. 
This step ensures the live version map and checkpoint tracker diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index 76bb47c64ab4c..ae1dc9e647073 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -36,26 +36,25 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.util.ArrayUtil; +import org.opensearch.Version; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.lucene.Lucene; +import org.opensearch.common.lucene.search.Queries; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.index.fieldvisitor.FieldsVisitor; -import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.Uid; import org.opensearch.index.translog.Translog; import java.io.Closeable; @@ -77,7 +76,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { private final boolean requiredFullRange; private final IndexSearcher indexSearcher; - private final MapperService mapperService; 
private int docIndex = 0; private final int totalHits; private ScoreDoc[] scoreDocs; @@ -88,7 +86,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { * Creates a new "translog" snapshot from Lucene for reading operations whose seq# in the specified range. * * @param engineSearcher the internal engine searcher which will be taken over if the snapshot is opened successfully - * @param mapperService the mapper service which will be mainly used to resolve the document's type and uid * @param searchBatchSize the number of documents should be returned by each search * @param fromSeqNo the min requesting seq# - inclusive * @param toSeqNo the maximum requesting seq# - inclusive @@ -96,11 +93,11 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { */ LuceneChangesSnapshot( Engine.Searcher engineSearcher, - MapperService mapperService, int searchBatchSize, long fromSeqNo, long toSeqNo, - boolean requiredFullRange + boolean requiredFullRange, + boolean accurateCount ) throws IOException { if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) { throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]"); @@ -114,7 +111,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { IOUtils.close(engineSearcher); } }; - this.mapperService = mapperService; final long requestingSize = (toSeqNo - fromSeqNo) == Long.MAX_VALUE ? Long.MAX_VALUE : (toSeqNo - fromSeqNo + 1L); this.searchBatchSize = requestingSize < searchBatchSize ? 
Math.toIntExact(requestingSize) : searchBatchSize; this.fromSeqNo = fromSeqNo; @@ -124,7 +120,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader())); this.indexSearcher.setQueryCache(null); this.parallelArray = new ParallelArray(this.searchBatchSize); - final TopDocs topDocs = searchOperations(null); + final TopDocs topDocs = searchOperations(null, accurateCount); this.totalHits = Math.toIntExact(topDocs.totalHits.value); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); @@ -200,7 +196,7 @@ private int nextDocIndex() throws IOException { // we have processed all docs in the current search - fetch the next batch if (docIndex == scoreDocs.length && docIndex > 0) { final ScoreDoc prev = scoreDocs[scoreDocs.length - 1]; - scoreDocs = searchOperations(prev).scoreDocs; + scoreDocs = searchOperations((FieldDoc) prev, false).scoreDocs; fillParallelArray(scoreDocs, parallelArray); docIndex = 0; } @@ -249,16 +245,31 @@ private void fillParallelArray(ScoreDoc[] scoreDocs, ParallelArray parallelArray } } - private TopDocs searchOperations(ScoreDoc after) throws IOException { - final Query rangeQuery = new BooleanQuery.Builder().add( - LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo), - BooleanClause.Occur.MUST - ) - // exclude non-root nested documents - .add(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME), BooleanClause.Occur.MUST) + private static Query operationsRangeQuery(long fromSeqNo, long toSeqNo) { + return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST) + .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.MUST) // exclude non-root nested docs .build(); + } + + static int countNumberOfHistoryOperations(Engine.Searcher searcher, long fromSeqNo, long toSeqNo) throws 
IOException { + if (fromSeqNo > toSeqNo || fromSeqNo < 0 || toSeqNo < 0) { + throw new IllegalArgumentException("Invalid sequence range; fromSeqNo [" + fromSeqNo + "] toSeqNo [" + toSeqNo + "]"); + } + IndexSearcher indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(searcher.getDirectoryReader())); + return indexSearcher.count(operationsRangeQuery(fromSeqNo, toSeqNo)); + } + + private TopDocs searchOperations(FieldDoc after, boolean accurate) throws IOException { + final Query rangeQuery = operationsRangeQuery(Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo); final Sort sortedBySeqNo = new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)); - return indexSearcher.searchAfter(after, rangeQuery, searchBatchSize, sortedBySeqNo); + final TopFieldCollector topFieldCollector = TopFieldCollector.create( + sortedBySeqNo, + searchBatchSize, + after, + accurate ? Integer.MAX_VALUE : 0 + ); + indexSearcher.search(rangeQuery, topFieldCollector); + return topFieldCollector.topDocs(); } private Translog.Operation readDocAsOp(int docIndex) throws IOException { @@ -278,20 +289,17 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { : SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); leaf.reader().document(segmentDocID, fields); - fields.postProcess(mapperService); final Translog.Operation op; final boolean isTombstone = parallelArray.isTombStone[docIndex]; - if (isTombstone && fields.uid() == null) { + if (isTombstone && fields.id() == null) { op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString()); assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]"; assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]"; } else { - final String id = fields.uid().id(); - final String type = fields.uid().type(); - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + final String id = 
fields.id(); if (isTombstone) { - op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version); + op = new Translog.Delete(id, seqNo, primaryTerm, version); assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); @@ -310,7 +318,6 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { // TODO: pass the latest timestamp from engine. final long autoGeneratedIdTimestamp = -1; op = new Translog.Index( - type, id, seqNo, primaryTerm, diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index 0491eb0db94cd..43fe10c217270 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -41,17 +41,17 @@ import org.apache.lucene.store.Lock; import org.opensearch.LegacyESVersion; import org.opensearch.Version; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.store.Store; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -320,48 +320,29 @@ public boolean 
ensureTranslogSynced(Stream locations) { public void syncTranslog() {} @Override - public Closeable acquireHistoryRetentionLock(HistorySource historySource) { + public Closeable acquireHistoryRetentionLock() { return () -> {}; } @Override public Translog.Snapshot newChangesSnapshot( String source, - MapperService mapperService, long fromSeqNo, long toSeqNo, - boolean requiredFullRange - ) { - return newEmptySnapshot(); - } - - @Override - public Translog.Snapshot readHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo + boolean requiredFullRange, + boolean accurateCount ) { return newEmptySnapshot(); } @Override - public int estimateNumberOfHistoryOperations( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) { - return 0; + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + try (Translog.Snapshot snapshot = newChangesSnapshot(source, fromSeqNo, toSeqNo, false, true)) { + return snapshot.totalOperations(); + } } - @Override - public boolean hasCompleteOperationHistory( - String reason, - HistorySource historySource, - MapperService mapperService, - long startingSeqNo - ) { + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { // we can do operation-based recovery if we don't have to replay any operation. 
return startingSeqNo > seqNoStats.getMaxSeqNo(); } @@ -439,13 +420,13 @@ public void forceMerge( ) {} @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { store.incRef(); - return new IndexCommitRef(indexCommit, store::decRef); + return new GatedCloseable<>(indexCommit, store::decRef); } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { return acquireLastIndexCommit(false); } diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java index c5fddb5d26c5b..a51137b4a4f69 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java @@ -36,7 +36,6 @@ import org.apache.lucene.util.BytesRef; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IgnoredFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -67,7 +66,7 @@ public class FieldsVisitor extends StoredFieldVisitor { private final String sourceFieldName; private final Set requiredFields; protected BytesReference source; - protected String type, id; + protected String id; protected Map> fieldsValues; public FieldsVisitor(boolean loadSource) { @@ -98,10 +97,6 @@ public Status needsField(FieldInfo fieldInfo) { } public void postProcess(MapperService mapperService) { - final DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - type = mapper.type(); - } for (Map.Entry> entry : fields().entrySet()) { MappedFieldType fieldType = mapperService.fieldType(entry.getKey()); if (fieldType == null) { @@ -167,13 +162,8 @@ public BytesReference 
source() { return source; } - public Uid uid() { - if (id == null) { - return null; - } else if (type == null) { - throw new IllegalStateException("Call postProcess before getting the uid"); - } - return new Uid(type, id); + public String id() { + return id; } public String routing() { @@ -195,7 +185,6 @@ public Map> fields() { public void reset() { if (fieldsValues != null) fieldsValues.clear(); source = null; - type = null; id = null; requiredFields.addAll(BASE_REQUIRED_FIELDS); diff --git a/server/src/main/java/org/opensearch/index/get/GetResult.java b/server/src/main/java/org/opensearch/index/get/GetResult.java index 84417f876d2f9..aa2bf43c5b290 100644 --- a/server/src/main/java/org/opensearch/index/get/GetResult.java +++ b/server/src/main/java/org/opensearch/index/get/GetResult.java @@ -34,6 +34,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressorFactory; @@ -65,7 +66,6 @@ public class GetResult implements Writeable, Iterable, ToXContentObject { public static final String _INDEX = "_index"; - public static final String _TYPE = "_type"; public static final String _ID = "_id"; private static final String _VERSION = "_version"; private static final String _SEQ_NO = "_seq_no"; @@ -74,7 +74,6 @@ public class GetResult implements Writeable, Iterable, ToXContent private static final String FIELDS = "fields"; private String index; - private String type; private String id; private long version; private long seqNo; @@ -88,7 +87,9 @@ public class GetResult implements Writeable, Iterable, ToXContent public GetResult(StreamInput in) throws IOException { index = in.readString(); - type = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readOptionalString(); + } id = in.readString(); seqNo = in.readZLong(); primaryTerm = in.readVLong(); @@ 
-121,7 +122,6 @@ public GetResult(StreamInput in) throws IOException { public GetResult( String index, - String type, String id, long seqNo, long primaryTerm, @@ -132,7 +132,6 @@ public GetResult( Map metaFields ) { this.index = index; - this.type = type; this.id = id; this.seqNo = seqNo; this.primaryTerm = primaryTerm; @@ -140,8 +139,8 @@ public GetResult( + seqNo + " primaryTerm: " + primaryTerm; - assert exists - || (seqNo == UNASSIGNED_SEQ_NO && primaryTerm == UNASSIGNED_PRIMARY_TERM) : "doc not found but seqNo/primaryTerm are set"; + assert exists || (seqNo == UNASSIGNED_SEQ_NO && primaryTerm == UNASSIGNED_PRIMARY_TERM) + : "doc not found but seqNo/primaryTerm are set"; this.version = version; this.exists = exists; this.source = source; @@ -163,13 +162,6 @@ public String getIndex() { return index; } - /** - * The type of the document. - */ - public String getType() { - return type; - } - /** * The id of the document. */ @@ -337,7 +329,6 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX, index); - builder.field(_TYPE, type); builder.field(_ID, id); if (isExists()) { if (version != -1) { @@ -354,10 +345,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static GetResult fromXContentEmbedded(XContentParser parser) throws IOException { XContentParser.Token token = parser.nextToken(); ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - return fromXContentEmbedded(parser, null, null, null); + return fromXContentEmbedded(parser, null, null); } - public static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException { + public static GetResult fromXContentEmbedded(XContentParser parser, String index, String id) throws IOException { XContentParser.Token token = parser.currentToken(); 
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); @@ -375,8 +366,6 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index } else if (token.isValue()) { if (_INDEX.equals(currentFieldName)) { index = parser.text(); - } else if (_TYPE.equals(currentFieldName)) { - type = parser.text(); } else if (_ID.equals(currentFieldName)) { id = parser.text(); } else if (_VERSION.equals(currentFieldName)) { @@ -414,7 +403,7 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index } } } - return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, documentFields, metaFields); + return new GetResult(index, id, seqNo, primaryTerm, version, found, source, documentFields, metaFields); } public static GetResult fromXContent(XContentParser parser) throws IOException { @@ -442,7 +431,9 @@ private Map readFields(StreamInput in) throws IOException @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); - out.writeOptionalString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeZLong(seqNo); out.writeVLong(primaryTerm); @@ -484,7 +475,6 @@ public boolean equals(Object o) { && primaryTerm == getResult.primaryTerm && exists == getResult.exists && Objects.equals(index, getResult.index) - && Objects.equals(type, getResult.type) && Objects.equals(id, getResult.id) && Objects.equals(documentFields, getResult.documentFields) && Objects.equals(metaFields, getResult.metaFields) @@ -493,7 +483,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, documentFields, metaFields, sourceAsMap()); + return Objects.hash(version, seqNo, primaryTerm, exists, index, id, documentFields, metaFields, sourceAsMap()); } @Override diff --git 
a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java index 992a53e9ae0da..a877b0085816a 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -107,7 +107,6 @@ public GetStats stats() { } public GetResult get( - String type, String id, String[] gFields, boolean realtime, @@ -115,11 +114,10 @@ public GetResult get( VersionType versionType, FetchSourceContext fetchSourceContext ) { - return get(type, id, gFields, realtime, version, versionType, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, fetchSourceContext); + return get(id, gFields, realtime, version, versionType, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, fetchSourceContext); } private GetResult get( - String type, String id, String[] gFields, boolean realtime, @@ -132,7 +130,7 @@ private GetResult get( currentMetric.inc(); try { long now = System.nanoTime(); - GetResult getResult = innerGet(type, id, gFields, realtime, version, versionType, ifSeqNo, ifPrimaryTerm, fetchSourceContext); + GetResult getResult = innerGet(id, gFields, realtime, version, versionType, ifSeqNo, ifPrimaryTerm, fetchSourceContext); if (getResult.isExists()) { existsMetric.inc(System.nanoTime() - now); @@ -145,9 +143,8 @@ private GetResult get( } } - public GetResult getForUpdate(String type, String id, long ifSeqNo, long ifPrimaryTerm) { + public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm) { return get( - type, id, new String[] { RoutingFieldMapper.NAME }, true, @@ -166,16 +163,16 @@ public GetResult getForUpdate(String type, String id, long ifSeqNo, long ifPrima *

        * Note: Call must release engine searcher associated with engineGetResult! */ - public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields, FetchSourceContext fetchSourceContext) { + public GetResult get(Engine.GetResult engineGetResult, String id, String[] fields, FetchSourceContext fetchSourceContext) { if (!engineGetResult.exists()) { - return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null); + return new GetResult(shardId.getIndexName(), id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null); } currentMetric.inc(); try { long now = System.nanoTime(); fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, fields); - GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, fetchSourceContext, engineGetResult, mapperService); + GetResult getResult = innerGetLoadFromStoredFields(id, fields, fetchSourceContext, engineGetResult, mapperService); if (getResult.isExists()) { existsMetric.inc(System.nanoTime() - now); } else { @@ -206,7 +203,6 @@ private FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSourceCont } private GetResult innerGet( - String type, String id, String[] gFields, boolean realtime, @@ -217,40 +213,31 @@ private GetResult innerGet( FetchSourceContext fetchSourceContext ) { fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields); - if (type == null || type.equals("_all")) { - DocumentMapper mapper = mapperService.documentMapper(); - type = mapper == null ? 
null : mapper.type(); - } - Engine.GetResult get = null; - if (type != null) { - Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); - get = indexShard.get( - new Engine.Get(realtime, realtime, type, id, uidTerm).version(version) - .versionType(versionType) - .setIfSeqNo(ifSeqNo) - .setIfPrimaryTerm(ifPrimaryTerm) - ); - assert get.isFromTranslog() == false || realtime : "should only read from translog if realtime enabled"; - if (get.exists() == false) { - get.close(); - } + Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + Engine.GetResult get = indexShard.get( + new Engine.Get(realtime, true, id, uidTerm).version(version) + .versionType(versionType) + .setIfSeqNo(ifSeqNo) + .setIfPrimaryTerm(ifPrimaryTerm) + ); + if (get.exists() == false) { + get.close(); } if (get == null || get.exists() == false) { - return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null); + return new GetResult(shardId.getIndexName(), id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null); } try { // break between having loaded it from translog (so we only have _source), and having a document to load - return innerGetLoadFromStoredFields(type, id, gFields, fetchSourceContext, get, mapperService); + return innerGetLoadFromStoredFields(id, gFields, fetchSourceContext, get, mapperService); } finally { get.close(); } } private GetResult innerGetLoadFromStoredFields( - String type, String id, String[] storedFields, FetchSourceContext fetchSourceContext, @@ -289,7 +276,7 @@ private GetResult innerGetLoadFromStoredFields( try { docIdAndVersion.reader.document(docIdAndVersion.docId, fieldVisitor); } catch (IOException e) { - throw new OpenSearchException("Failed to get type [" + type + "] and id [" + id + "]", e); + throw new OpenSearchException("Failed to get id [" + id + "]", e); } source = fieldVisitor.source(); @@ -308,7 +295,6 @@ private GetResult innerGetLoadFromStoredFields( 
assert source != null : "original source in translog must exist"; SourceToParse sourceToParse = new SourceToParse( shardId.getIndexName(), - type, id, source, XContentHelper.xContentType(source), @@ -417,13 +403,12 @@ private GetResult innerGetLoadFromStoredFields( try { source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap)); } catch (IOException e) { - throw new OpenSearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e); + throw new OpenSearchException("Failed to get id [" + id + "] with includes/excludes set", e); } } return new GetResult( shardId.getIndexName(), - type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm, diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index d85c18e272793..0ee0a3cb9a180 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -81,7 +81,7 @@ public Builder(RootObjectMapper.Builder builder, MapperService mapperService) { this.rootObjectMapper = builder.build(builderContext); final String type = rootObjectMapper.name(); - final DocumentMapper existingMapper = mapperService.documentMapper(type); + final DocumentMapper existingMapper = mapperService.documentMapper(); final Version indexCreatedVersion = mapperService.getIndexSettings().getIndexVersionCreated(); final Map metadataMapperParsers = mapperService.mapperRegistry.getMetadataMapperParsers( indexCreatedVersion @@ -153,7 +153,7 @@ public DocumentMapper(MapperService mapperService, Mapping mapping) { this.fieldMappers = MappingLookup.fromMapping(this.mapping, indexAnalyzers.getDefaultIndexAnalyzer()); try { - mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); + mappingSource = new CompressedXContent(this, ToXContent.EMPTY_PARAMS); } 
catch (Exception e) { throw new OpenSearchGenerationException("failed to serialize source for type [" + type + "]", e); } @@ -208,14 +208,6 @@ public T metadataMapper(Class type) { return mapping.metadataMapper(type); } - public IndexFieldMapper indexMapper() { - return metadataMapper(IndexFieldMapper.class); - } - - public TypeFieldMapper typeMapper() { - return metadataMapper(TypeFieldMapper.class); - } - public SourceFieldMapper sourceMapper() { return metadataMapper(SourceFieldMapper.class); } @@ -252,14 +244,14 @@ public ParsedDocument parse(SourceToParse source) throws MapperParsingException return documentParser.parseDocument(source, mapping.metadataMappers); } - public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException { - final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); + public ParsedDocument createDeleteTombstoneDoc(String index, String id) throws MapperParsingException { + final SourceToParse emptySource = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON); return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone(); } public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException { final String id = ""; // _id won't be used. 
- final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); + final SourceToParse sourceToParse = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON); final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone(); // Store the reason of a noop as a raw string in the _source field final BytesRef byteRef = new BytesRef(reason); diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapperParser.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapperParser.java index 8b406c4691018..2d6880c6b4186 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapperParser.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapperParser.java @@ -114,10 +114,6 @@ public Mapper.TypeParser.ParserContext parserContext(DateFormatter dateFormatter } public DocumentMapper parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { - return parse(type, source, null); - } - - public DocumentMapper parse(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { Map mapping = null; if (source != null) { Map root = XContentHelper.convertToMap(source.compressedReference(), true, XContentType.JSON).v2(); @@ -128,22 +124,14 @@ public DocumentMapper parse(@Nullable String type, CompressedXContent source, St if (mapping == null) { mapping = new HashMap<>(); } - return parse(type, mapping, defaultSource); + return parse(type, mapping); } @SuppressWarnings({ "unchecked" }) - private DocumentMapper parse(String type, Map mapping, String defaultSource) throws MapperParsingException { + private DocumentMapper parse(String type, Map mapping) throws MapperParsingException { if (type == null) { throw new MapperParsingException("Failed to derive type"); } - - if (defaultSource != null) { - Tuple> t = 
extractMapping(MapperService.DEFAULT_MAPPING, defaultSource); - if (t.v2() != null) { - XContentHelper.mergeDefaults(mapping, t.v2()); - } - } - Mapper.TypeParser.ParserContext parserContext = parserContext(); // parse RootObjectMapper DocumentMapper.Builder docBuilder = new DocumentMapper.Builder( diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java index c7317ef639eff..bcafddd6d5816 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java @@ -53,7 +53,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Objects; import static org.opensearch.index.mapper.FieldMapper.IGNORE_MALFORMED_SETTING; @@ -71,8 +70,6 @@ final class DocumentParser { } ParsedDocument parseDocument(SourceToParse source, MetadataFieldMapper[] metadataFieldsMappers) throws MapperParsingException { - validateType(source); - final Mapping mapping = docMapper.mapping(); final ParseContext.InternalParseContext context; final XContentType xContentType = source.getXContentType(); @@ -140,21 +137,6 @@ private static void internalParseDocument( } } - private void validateType(SourceToParse source) { - if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) { - throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]"); - } - - if (Objects.equals(source.type(), docMapper.type()) == false && MapperService.SINGLE_MAPPING_NAME.equals(source.type()) == false) { // used - // by - // typeless - // APIs - throw new MapperParsingException( - "Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]" - ); - } - } - private static void validateStart(XContentParser parser) throws IOException { // will result in START_OBJECT XContentParser.Token token = 
parser.nextToken(); @@ -193,7 +175,6 @@ private static ParsedDocument parsedDocument(SourceToParse source, ParseContext. context.version(), context.seqID(), context.sourceToParse().id(), - context.sourceToParse().type(), source.routing(), context.docs(), context.sourceToParse().source(), diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java index 1bf8686f07cfe..575cfc8ca424b 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java @@ -59,6 +59,7 @@ import org.opensearch.index.analysis.NamedAnalyzer; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.query.DistanceFeatureQueryBuilder; +import org.opensearch.index.query.IntervalMode; import org.opensearch.index.query.QueryRewriteContext; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.QueryShardException; @@ -365,7 +366,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, float boost, Quer /** * Create an {@link IntervalsSource} to be used for proximity queries */ - public IntervalsSource intervals(String query, int max_gaps, boolean ordered, NamedAnalyzer analyzer, boolean prefix) + public IntervalsSource intervals(String query, int max_gaps, IntervalMode mode, NamedAnalyzer analyzer, boolean prefix) throws IOException { throw new IllegalArgumentException( "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" diff --git a/server/src/main/java/org/opensearch/index/mapper/Mapper.java b/server/src/main/java/org/opensearch/index/mapper/Mapper.java index 27507ff78c742..f9f16a33a0c52 100644 --- a/server/src/main/java/org/opensearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/Mapper.java @@ -33,6 +33,8 @@ package org.opensearch.index.mapper; import 
org.opensearch.Version; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.Nullable; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.xcontent.ToXContentFragment; @@ -69,6 +71,14 @@ public Settings indexSettings() { public Version indexCreatedVersion() { return Version.indexCreated(indexSettings); } + + public Version indexCreatedVersionOrDefault(@Nullable Version defaultValue) { + if (defaultValue == null || hasIndexCreated(indexSettings)) { + return indexCreatedVersion(); + } else { + return defaultValue; + } + } } public abstract static class Builder { @@ -240,4 +250,12 @@ public final String simpleName() { */ public abstract void validate(MappingLookup mappers); + /** + * Check if settings have IndexMetadata.SETTING_INDEX_VERSION_CREATED setting. + * @param settings settings + * @return "true" if settings have IndexMetadata.SETTING_INDEX_VERSION_CREATED setting, "false" otherwise + */ + protected static boolean hasIndexCreated(Settings settings) { + return settings.hasValue(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey()); + } } diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 765f5dc2d2f24..a92647929ff08 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -32,7 +32,6 @@ package org.opensearch.index.mapper; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -41,7 +40,6 @@ import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.Nullable; import 
org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.logging.DeprecationLogger; @@ -115,7 +113,6 @@ public enum MergeReason { MAPPING_RECOVERY; } - public static final String DEFAULT_MAPPING = "_default_"; public static final String SINGLE_MAPPING_NAME = "_doc"; public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting( "index.mapping.nested_fields.limit", @@ -175,10 +172,7 @@ public enum MergeReason { private final IndexAnalyzers indexAnalyzers; - private volatile String defaultMappingSource; - private volatile DocumentMapper mapper; - private volatile DocumentMapper defaultMapper; private final DocumentMapperParser documentParser; private final Version indexVersionCreated; @@ -231,12 +225,6 @@ public MapperService( && indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) { throw new IllegalArgumentException("Setting " + INDEX_MAPPER_DYNAMIC_SETTING.getKey() + " was removed after version 6.0.0"); } - - defaultMappingSource = "{\"_default_\":{}}"; - - if (logger.isTraceEnabled()) { - logger.trace("default mapping source[{}]", defaultMappingSource); - } } public boolean hasNested() { @@ -286,9 +274,6 @@ public boolean updateMapping(final IndexMetadata currentIndexMetadata, final Ind if (mapper != null) { existingMappers.add(mapper.type()); } - if (defaultMapper != null) { - existingMappers.add(DEFAULT_MAPPING); - } final Map updatedEntries; try { // only update entries if needed @@ -304,13 +289,8 @@ public boolean updateMapping(final IndexMetadata currentIndexMetadata, final Ind for (DocumentMapper documentMapper : updatedEntries.values()) { String mappingType = documentMapper.type(); - MappingMetadata mappingMetadata; - if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { - mappingMetadata = newIndexMetadata.defaultMapping(); - } else { - mappingMetadata = newIndexMetadata.mapping(); - assert mappingType.equals(mappingMetadata.type()); - } + 
MappingMetadata mappingMetadata = newIndexMetadata.mapping(); + assert mappingType.equals(mappingMetadata.type()); CompressedXContent incomingMappingSource = mappingMetadata.source(); String op = existingMappers.contains(mappingType) ? "updated" : "added"; @@ -325,13 +305,13 @@ public boolean updateMapping(final IndexMetadata currentIndexMetadata, final Ind // refresh mapping can happen when the parsing/merging of the mapping from the metadata doesn't result in the same // mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the // merge version of it, which it does when refreshing the mappings), and warn log it. - if (documentMapper(mappingType).mappingSource().equals(incomingMappingSource) == false) { + if (documentMapper().mappingSource().equals(incomingMappingSource) == false) { logger.debug( "[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index(), mappingType, incomingMappingSource, - documentMapper(mappingType).mappingSource() + documentMapper().mappingSource() ); requireRefresh = true; @@ -351,20 +331,6 @@ private void assertMappingVersion( // if the mapping version is unchanged, then there should not be any updates and all mappings should be the same assert updatedEntries.isEmpty() : updatedEntries; - MappingMetadata defaultMapping = newIndexMetadata.defaultMapping(); - if (defaultMapping != null) { - final CompressedXContent currentSource = currentIndexMetadata.defaultMapping().source(); - final CompressedXContent newSource = defaultMapping.source(); - assert currentSource.equals(newSource) : "expected current mapping [" - + currentSource - + "] for type [" - + defaultMapping.type() - + "] " - + "to be the same as new mapping [" - + newSource - + "]"; - } - MappingMetadata mapping = newIndexMetadata.mapping(); if (mapping != null) { final CompressedXContent currentSource = currentIndexMetadata.mapping().source(); @@ -400,13 +366,8 @@ private void assertMappingVersion( + 
"]"; assert updatedEntries.isEmpty() == false; for (final DocumentMapper documentMapper : updatedEntries.values()) { - final MappingMetadata currentMapping; - if (documentMapper.type().equals(MapperService.DEFAULT_MAPPING)) { - currentMapping = currentIndexMetadata.defaultMapping(); - } else { - currentMapping = currentIndexMetadata.mapping(); - assert currentMapping == null || documentMapper.type().equals(currentMapping.type()); - } + final MappingMetadata currentMapping = currentIndexMetadata.mapping(); + assert currentMapping == null || documentMapper.type().equals(currentMapping.type()); if (currentMapping != null) { final CompressedXContent currentSource = currentMapping.source(); final CompressedXContent newSource = documentMapper.mappingSource(); @@ -448,66 +409,35 @@ public void merge(IndexMetadata indexMetadata, MergeReason reason) { } public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason) { - return internalMerge(Collections.singletonMap(type, mappingSource), reason).get(type); + return internalMerge(Collections.singletonMap(type, mappingSource), reason).values().iterator().next(); } private synchronized Map internalMerge(IndexMetadata indexMetadata, MergeReason reason) { assert reason != MergeReason.MAPPING_UPDATE_PREFLIGHT; Map map = new LinkedHashMap<>(); - for (ObjectCursor cursor : indexMetadata.getMappings().values()) { - MappingMetadata mappingMetadata = cursor.value; + MappingMetadata mappingMetadata = indexMetadata.mapping(); + if (mappingMetadata != null) { map.put(mappingMetadata.type(), mappingMetadata.source()); } return internalMerge(map, reason); } private synchronized Map internalMerge(Map mappings, MergeReason reason) { - DocumentMapper defaultMapper = null; - String defaultMappingSource = null; - - if (mappings.containsKey(DEFAULT_MAPPING)) { - // verify we can parse it - // NOTE: never apply the default here - try { - defaultMapper = documentParser.parse(DEFAULT_MAPPING, 
mappings.get(DEFAULT_MAPPING)); - } catch (Exception e) { - throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, DEFAULT_MAPPING, e.getMessage()); - } - defaultMappingSource = mappings.get(DEFAULT_MAPPING).string(); - } - - final String defaultMappingSourceOrLastStored; - if (defaultMappingSource != null) { - defaultMappingSourceOrLastStored = defaultMappingSource; - } else { - defaultMappingSourceOrLastStored = this.defaultMappingSource; - } - DocumentMapper documentMapper = null; for (Map.Entry entry : mappings.entrySet()) { String type = entry.getKey(); - if (type.equals(DEFAULT_MAPPING)) { - continue; - } - if (documentMapper != null) { throw new IllegalArgumentException("Cannot put multiple mappings: " + mappings.keySet()); } - final boolean applyDefault = - // the default was already applied if we are recovering - reason != MergeReason.MAPPING_RECOVERY - // only apply the default mapping if we don't have the type yet - && this.mapper == null; - try { - documentMapper = documentParser.parse(type, entry.getValue(), applyDefault ? 
defaultMappingSourceOrLastStored : null); + documentMapper = documentParser.parse(type, entry.getValue()); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } } - return internalMerge(defaultMapper, defaultMappingSource, documentMapper, reason); + return internalMerge(documentMapper, reason); } static void validateTypeName(String type) { @@ -535,25 +465,8 @@ static void validateTypeName(String type) { } } - private synchronized Map internalMerge( - @Nullable DocumentMapper defaultMapper, - @Nullable String defaultMappingSource, - DocumentMapper mapper, - MergeReason reason - ) { - + private synchronized Map internalMerge(DocumentMapper mapper, MergeReason reason) { Map results = new LinkedHashMap<>(2); - - if (defaultMapper != null) { - if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) { - throw new IllegalArgumentException(DEFAULT_MAPPING_ERROR_MESSAGE); - } else if (reason == MergeReason.MAPPING_UPDATE) { // only log in case of explicit mapping updates - deprecationLogger.deprecate("default_mapping_not_allowed", DEFAULT_MAPPING_ERROR_MESSAGE); - } - assert defaultMapper.type().equals(DEFAULT_MAPPING); - results.put(DEFAULT_MAPPING, defaultMapper); - } - DocumentMapper newMapper = null; if (mapper != null) { // check naming @@ -580,10 +493,6 @@ private synchronized Map internalMerge( } // commit the change - if (defaultMappingSource != null) { - this.defaultMappingSource = defaultMappingSource; - this.defaultMapper = defaultMapper; - } if (newMapper != null) { this.mapper = newMapper; } @@ -595,7 +504,7 @@ private synchronized Map internalMerge( private boolean assertSerialization(DocumentMapper mapper) { // capture the source now, it may change due to concurrent parsing final CompressedXContent mappingSource = mapper.mappingSource(); - DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); + DocumentMapper newMapper = parse(mapper.type(), 
mappingSource); if (newMapper.mappingSource().equals(mappingSource) == false) { throw new IllegalStateException( @@ -609,8 +518,8 @@ private boolean assertSerialization(DocumentMapper mapper) { return true; } - public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { - return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource) throws MapperParsingException { + return documentParser.parse(mappingType, mappingSource); } /** @@ -620,21 +529,6 @@ public DocumentMapper documentMapper() { return mapper; } - /** - * Return the {@link DocumentMapper} for the given type. By using the special - * {@value #DEFAULT_MAPPING} type, you can get a {@link DocumentMapper} for - * the default mapping. - */ - public DocumentMapper documentMapper(String type) { - if (mapper != null && type.equals(mapper.type())) { - return mapper; - } - if (DEFAULT_MAPPING.equals(type)) { - return defaultMapper; - } - return null; - } - /** * Returns {@code true} if the given {@code mappingSource} includes a type * as a top-level object. @@ -648,15 +542,6 @@ public static boolean isMappingSourceTyped(String type, CompressedXContent mappi return isMappingSourceTyped(type, root); } - /** - * If the _type name is _doc and there is no _doc top-level key then this means that we - * are handling a typeless call. In such a case, we override _doc with the actual type - * name in the mappings. This allows to use typeless APIs on typed indices. - */ - public String getTypeForUpdate(String type, CompressedXContent mappingSource) { - return isMappingSourceTyped(type, mappingSource) == false ? resolveDocumentType(type) : type; - } - /** * Resolves a type from a mapping-related request into the type that should be used when * merging and updating mappings. 
@@ -678,12 +563,12 @@ public String resolveDocumentType(String type) { * Returns the document mapper created, including a mapping update if the * type has been dynamically created. */ - public DocumentMapperForType documentMapperWithAutoCreate(String type) { - DocumentMapper mapper = documentMapper(type); + public DocumentMapperForType documentMapperWithAutoCreate() { + DocumentMapper mapper = documentMapper(); if (mapper != null) { return new DocumentMapperForType(mapper, null); } - mapper = parse(type, null, true); + mapper = parse(SINGLE_MAPPING_NAME, null); return new DocumentMapperForType(mapper, mapper.mapping()); } diff --git a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java index 2d3b5fc1bb9dc..6991db3306ea7 100644 --- a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java +++ b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java @@ -47,7 +47,7 @@ public class ParsedDocument { private final Field version; - private final String id, type; + private final String id; private final SeqNoFieldMapper.SequenceIDFields seqID; private final String routing; @@ -63,7 +63,6 @@ public ParsedDocument( Field version, SeqNoFieldMapper.SequenceIDFields seqID, String id, - String type, String routing, List documents, BytesReference source, @@ -73,7 +72,6 @@ public ParsedDocument( this.version = version; this.seqID = seqID; this.id = id; - this.type = type; this.routing = routing; this.documents = documents; this.source = source; @@ -85,10 +83,6 @@ public String id() { return this.id; } - public String type() { - return this.type; - } - public Field version() { return version; } diff --git a/server/src/main/java/org/opensearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/RangeFieldMapper.java index 2464f482e3377..6ddb6d21ed5c1 100644 --- a/server/src/main/java/org/opensearch/index/mapper/RangeFieldMapper.java +++ 
b/server/src/main/java/org/opensearch/index/mapper/RangeFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; +import org.opensearch.Version; import org.opensearch.common.Explicit; import org.opensearch.common.collect.Tuple; import org.opensearch.common.geo.ShapeRelation; @@ -116,15 +117,17 @@ public static class Builder extends ParametrizedFieldMapper.Builder { private final Parameter> meta = Parameter.metaParam(); private final RangeType type; + private final Version indexCreatedVersion; public Builder(String name, RangeType type, Settings settings) { - this(name, type, COERCE_SETTING.get(settings)); + this(name, type, COERCE_SETTING.get(settings), hasIndexCreated(settings) ? Version.indexCreated(settings) : null); } - public Builder(String name, RangeType type, boolean coerceByDefault) { + public Builder(String name, RangeType type, boolean coerceByDefault, Version indexCreatedVersion) { super(name); this.type = type; this.coerce = Parameter.explicitBoolParam("coerce", true, m -> toType(m).coerce, coerceByDefault); + this.indexCreatedVersion = indexCreatedVersion; if (this.type != RangeType.DATE) { format.neverSerialize(); locale.neverSerialize(); @@ -157,8 +160,11 @@ protected RangeFieldType setupFieldType(BuilderContext context) { + " type" ); } + + // The builder context may not have index created version, falling back to indexCreatedVersion + // property of this mapper builder. 
DateFormatter dateTimeFormatter; - if (Joda.isJodaPattern(context.indexCreatedVersion(), format.getValue())) { + if (Joda.isJodaPattern(context.indexCreatedVersionOrDefault(indexCreatedVersion), format.getValue())) { dateTimeFormatter = Joda.forPattern(format.getValue()).withLocale(locale.getValue()); } else { dateTimeFormatter = DateFormatter.forPattern(format.getValue()).withLocale(locale.getValue()); @@ -371,6 +377,7 @@ public Query rangeQuery( private final Locale locale; private final boolean coerceByDefault; + private final Version indexCreatedVersion; private RangeFieldMapper( String simpleName, @@ -389,6 +396,7 @@ private RangeFieldMapper( this.format = builder.format.getValue(); this.locale = builder.locale.getValue(); this.coerceByDefault = builder.coerce.getDefaultValue().value(); + this.indexCreatedVersion = builder.indexCreatedVersion; } boolean coerce() { @@ -397,7 +405,7 @@ boolean coerce() { @Override public ParametrizedFieldMapper.Builder getMergeBuilder() { - return new Builder(simpleName(), type, coerceByDefault).init(this); + return new Builder(simpleName(), type, coerceByDefault, indexCreatedVersion).init(this); } @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java index 37ecf9491e4b3..4aa8d3117bc9c 100644 --- a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java +++ b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java @@ -45,17 +45,14 @@ public class SourceToParse { private final String index; - private final String type; - private final String id; private final @Nullable String routing; private final XContentType xContentType; - public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType, @Nullable String routing) { + public SourceToParse(String index, String id, BytesReference source, XContentType xContentType, @Nullable String routing) { this.index = 
Objects.requireNonNull(index); - this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); // we always convert back to byte array, since we store it and Field only supports bytes.. // so, we might as well do it here, and improve the performance of working with direct byte arrays @@ -64,8 +61,8 @@ public SourceToParse(String index, String type, String id, BytesReference source this.routing = routing; } - public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) { - this(index, type, id, source, xContentType, null); + public SourceToParse(String index, String id, BytesReference source, XContentType xContentType) { + this(index, id, source, xContentType, null); } public BytesReference source() { @@ -76,10 +73,6 @@ public String index() { return this.index; } - public String type() { - return this.type; - } - public String id() { return this.id; } diff --git a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index 064ef178bc90f..bcb3134e532d7 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -85,6 +85,7 @@ import org.opensearch.index.fielddata.plain.PagedBytesIndexFieldData; import org.opensearch.index.mapper.Mapper.TypeParser.ParserContext; import org.opensearch.index.query.IntervalBuilder; +import org.opensearch.index.query.IntervalMode; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.similarity.SimilarityProvider; import org.opensearch.search.aggregations.support.CoreValuesSourceType; @@ -789,7 +790,7 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew } @Override - public IntervalsSource intervals(String text, int maxGaps, boolean ordered, NamedAnalyzer analyzer, boolean prefix) + public IntervalsSource intervals(String text, 
int maxGaps, IntervalMode mode, NamedAnalyzer analyzer, boolean prefix) throws IOException { if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); @@ -805,7 +806,7 @@ public IntervalsSource intervals(String text, int maxGaps, boolean ordered, Name return Intervals.prefix(normalizedTerm); } IntervalBuilder builder = new IntervalBuilder(name(), analyzer == null ? getTextSearchInfo().getSearchAnalyzer() : analyzer); - return builder.analyzeText(text, maxGaps, ordered); + return builder.analyzeText(text, maxGaps, mode); } @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java index ce7bdd3682d83..9adb1430b3df0 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java @@ -186,7 +186,7 @@ public void preParse(ParseContext context) { if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { return; } - context.doc().add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType)); + context.doc().add(new Field(fieldType().name(), MapperService.SINGLE_MAPPING_NAME, fieldType)); if (fieldType().hasDocValues()) { context.doc().add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(MapperService.SINGLE_MAPPING_NAME))); } diff --git a/server/src/main/java/org/opensearch/index/mapper/Uid.java b/server/src/main/java/org/opensearch/index/mapper/Uid.java index 90d54319c026b..fa149b9dcac46 100644 --- a/server/src/main/java/org/opensearch/index/mapper/Uid.java +++ b/server/src/main/java/org/opensearch/index/mapper/Uid.java @@ -43,52 +43,13 @@ public final class Uid { public static final char DELIMITER = '#'; public static final byte DELIMITER_BYTE = 0x23; - private final String type; - - private final String id; - - public 
Uid(String type, String id) { - this.type = type; - this.id = id; - } - - public String type() { - return type; - } - - public String id() { - return id; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Uid uid = (Uid) o; - - if (id != null ? !id.equals(uid.id) : uid.id != null) return false; - if (type != null ? !type.equals(uid.type) : uid.type != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = type != null ? type.hashCode() : 0; - result = 31 * result + (id != null ? id.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return type + "#" + id; - } - private static final int UTF8 = 0xff; private static final int NUMERIC = 0xfe; private static final int BASE64_ESCAPE = 0xfd; + // non-instantiable + private Uid() {} + static boolean isURLBase64WithoutPadding(String id) { // We are not lenient about padding chars ('=') otherwise // 'xxx=' and 'xxx' could be considered the same id diff --git a/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java index ca7e97cfb8bac..9281f1767d72d 100644 --- a/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/AbstractGeometryQueryBuilder.java @@ -35,11 +35,11 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.SetOnce; +import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.client.Client; -import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.geo.GeoJson; @@ 
-56,6 +56,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.geometry.Geometry; import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.Objects; @@ -66,9 +67,6 @@ */ public abstract class AbstractGeometryQueryBuilder> extends AbstractQueryBuilder { - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [geo_shape] queries. " - + "The type should no longer be specified in the [indexed_shape] section."; - public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; @@ -80,7 +78,6 @@ public abstract class AbstractGeometryQueryBuilder supplier; protected final String indexedShapeId; - protected final String indexedShapeType; protected Geometry shape; protected String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME; @@ -113,7 +109,7 @@ public abstract class AbstractGeometryQueryBuilder supplier, - String indexedShapeId, - @Nullable String indexedShapeType - ) { + protected AbstractGeometryQueryBuilder(String fieldName, Supplier supplier, String indexedShapeId) { + if (fieldName == null) { + throw new IllegalArgumentException("fieldName is required"); + } + if (supplier == null && indexedShapeId == null) { + throw new IllegalArgumentException("either shape or indexedShapeId is required"); + } + this.fieldName = fieldName; this.shape = null; this.supplier = supplier; this.indexedShapeId = indexedShapeId; - this.indexedShapeType = indexedShapeType; } /** @@ -196,11 +174,13 @@ protected AbstractGeometryQueryBuilder(StreamInput in) throws IOException { if (in.readBoolean()) { shape = GeometryIO.readGeometry(in); indexedShapeId = null; - indexedShapeType = null; } else { shape = null; indexedShapeId = in.readOptionalString(); - indexedShapeType = in.readOptionalString(); + if 
(in.getVersion().before(Version.V_2_0_0)) { + String type = in.readOptionalString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected type [_doc], got [" + type + "]"; + } indexedShapeIndex = in.readOptionalString(); indexedShapePath = in.readOptionalString(); indexedShapeRouting = in.readOptionalString(); @@ -222,7 +202,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { GeometryIO.writeGeometry(out, shape); } else { out.writeOptionalString(indexedShapeId); - out.writeOptionalString(indexedShapeType); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); + } out.writeOptionalString(indexedShapeIndex); out.writeOptionalString(indexedShapePath); out.writeOptionalString(indexedShapeRouting); @@ -266,17 +248,6 @@ public String indexedShapeId() { return indexedShapeId; } - /** - * @return the document type of the indexed Shape that will be used in the - * Query - * - * @deprecated Types are in the process of being removed. 
- */ - @Deprecated - public String indexedShapeType() { - return indexedShapeType; - } - /** * Sets the name of the index where the indexed Shape can be found * @@ -382,12 +353,11 @@ public boolean ignoreUnmapped() { /** creates a new ShapeQueryBuilder from the provided field name and shape builder */ protected abstract AbstractGeometryQueryBuilder newShapeQueryBuilder(String fieldName, Geometry shape); - /** creates a new ShapeQueryBuilder from the provided field name, supplier, indexed shape id, and indexed shape type */ + /** creates a new ShapeQueryBuilder from the provided field name, supplier, indexed shape id */ protected abstract AbstractGeometryQueryBuilder newShapeQueryBuilder( String fieldName, Supplier shapeSupplier, - String indexedShapeId, - String indexedShapeType + String indexedShapeId ); @Override @@ -423,14 +393,10 @@ private void fetch(Client client, GetRequest getRequest, String path, ActionList public void onResponse(GetResponse response) { try { if (!response.isExists()) { - throw new IllegalArgumentException( - "Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found" - ); + throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] not found"); } if (response.isSourceEmpty()) { - throw new IllegalArgumentException( - "Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] source disabled" - ); + throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] source disabled"); } String[] pathElements = path.split("\\."); @@ -484,9 +450,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep GeoJson.toXContent(shape, builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()).field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId); - if (indexedShapeType != null) { - builder.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); - } if (indexedShapeIndex != null) { 
builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } @@ -518,7 +481,6 @@ protected boolean doEquals(AbstractGeometryQueryBuilder other) { && Objects.equals(indexedShapeId, other.indexedShapeId) && Objects.equals(indexedShapeIndex, other.indexedShapeIndex) && Objects.equals(indexedShapePath, other.indexedShapePath) - && Objects.equals(indexedShapeType, other.indexedShapeType) && Objects.equals(indexedShapeRouting, other.indexedShapeRouting) && Objects.equals(relation, other.relation) && Objects.equals(shape, other.shape) @@ -533,7 +495,6 @@ protected int doHashCode() { indexedShapeId, indexedShapeIndex, indexedShapePath, - indexedShapeType, indexedShapeRouting, relation, shape, @@ -549,19 +510,14 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws } else if (this.shape == null) { SetOnce supplier = new SetOnce<>(); queryRewriteContext.registerAsyncAction((client, listener) -> { - GetRequest getRequest; - if (indexedShapeType == null) { - getRequest = new GetRequest(indexedShapeIndex, indexedShapeId); - } else { - getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); - } + GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeId); getRequest.routing(indexedShapeRouting); fetch(client, getRequest, indexedShapePath, ActionListener.wrap(builder -> { supplier.set(builder); listener.onResponse(null); }, listener::onFailure)); }); - return newShapeQueryBuilder(this.fieldName, supplier::get, this.indexedShapeId, this.indexedShapeType).relation(relation); + return newShapeQueryBuilder(this.fieldName, supplier::get, this.indexedShapeId).relation(relation); } return this; } @@ -573,7 +529,6 @@ protected abstract static class ParsedGeometryQueryParams { public ShapeBuilder shape; public String id = null; - public String type = null; public String index = null; public String shapePath = null; public String shapeRouting = null; @@ -617,8 +572,6 @@ public static 
ParsedGeometryQueryParams parsedParamsFromXContent(XContentParser } else if (token.isValue()) { if (SHAPE_ID_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { params.id = parser.text(); - } else if (SHAPE_TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - params.type = parser.text(); } else if (SHAPE_INDEX_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { params.index = parser.text(); } else if (SHAPE_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { diff --git a/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java index 246a1e1dcf921..161c6e64c7bf3 100644 --- a/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/GeoShapeQueryBuilder.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.geo.ShapeRelation; @@ -43,7 +42,6 @@ import org.opensearch.common.geo.parsers.ShapeParser; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.geometry.Geometry; @@ -62,8 +60,6 @@ */ public class GeoShapeQueryBuilder extends AbstractGeometryQueryBuilder { public static final String NAME = "geo_shape"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoShapeQueryBuilder.class); - protected static final ParseField STRATEGY_FIELD = new ParseField("strategy"); private SpatialStrategy strategy; @@ -97,31 +93,8 @@ public GeoShapeQueryBuilder(String fieldName, 
ShapeBuilder shape) { super(fieldName, shape); } - public GeoShapeQueryBuilder( - String fieldName, - Supplier shapeSupplier, - String indexedShapeId, - @Nullable String indexedShapeType - ) { - super(fieldName, shapeSupplier, indexedShapeId, indexedShapeType); - } - - /** - * Creates a new GeoShapeQueryBuilder whose Query will be against the given - * field name and will use the Shape found with the given ID in the given - * type - * - * @param fieldName - * Name of the field that will be filtered - * @param indexedShapeId - * ID of the indexed Shape that will be used in the Query - * @param indexedShapeType - * Index type of the indexed Shapes - * @deprecated use {@link #GeoShapeQueryBuilder(String, String)} instead - */ - @Deprecated - public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { - super(fieldName, indexedShapeId, indexedShapeType); + public GeoShapeQueryBuilder(String fieldName, Supplier shapeSupplier, String indexedShapeId) { + super(fieldName, shapeSupplier, indexedShapeId); } /** @@ -223,13 +196,8 @@ protected GeoShapeQueryBuilder newShapeQueryBuilder(String fieldName, Geometry s } @Override - protected GeoShapeQueryBuilder newShapeQueryBuilder( - String fieldName, - Supplier shapeSupplier, - String indexedShapeId, - String indexedShapeType - ) { - return new GeoShapeQueryBuilder(fieldName, shapeSupplier, indexedShapeId, indexedShapeType); + protected GeoShapeQueryBuilder newShapeQueryBuilder(String fieldName, Supplier shapeSupplier, String indexedShapeId) { + return new GeoShapeQueryBuilder(fieldName, shapeSupplier, indexedShapeId); } @Override @@ -291,14 +259,11 @@ public static GeoShapeQueryBuilder fromXContent(XContentParser parser) throws IO ); GeoShapeQueryBuilder builder; - if (pgsqp.type != null) { - deprecationLogger.deprecate("geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE); - } if (pgsqp.shape != null) { builder = new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.shape); } else { - builder = 
new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.id, pgsqp.type); + builder = new GeoShapeQueryBuilder(pgsqp.fieldName, pgsqp.id); } if (pgsqp.index != null) { diff --git a/server/src/main/java/org/opensearch/index/query/IdsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/IdsQueryBuilder.java index 2b63e424b4a92..724425957f75a 100644 --- a/server/src/main/java/org/opensearch/index/query/IdsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/IdsQueryBuilder.java @@ -32,9 +32,8 @@ package org.opensearch.index.query; -import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.Version; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; @@ -44,14 +43,12 @@ import org.opensearch.common.xcontent.ObjectParser; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Objects; @@ -72,8 +69,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { private final Set ids = new HashSet<>(); - private String[] types = Strings.EMPTY_ARRAY; - /** * Creates a new IdsQueryBuilder with no types specified upfront */ @@ -86,38 +81,23 @@ public IdsQueryBuilder() { */ public IdsQueryBuilder(StreamInput in) throws IOException { super(in); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + // types no longer relevant so ignore + String[] types = in.readStringArray(); + if (types.length > 0) { + throw new IllegalStateException("types are no longer supported 
in ids query but found [" + Arrays.toString(types) + "]"); + } + } Collections.addAll(ids, in.readStringArray()); } @Override protected void doWriteTo(StreamOutput out) throws IOException { - out.writeStringArray(types); - out.writeStringArray(ids.toArray(new String[ids.size()])); - } - - /** - * Add types to query - * - * @deprecated Types are in the process of being removed, prefer to filter on a field instead. - */ - @Deprecated - public IdsQueryBuilder types(String... types) { - if (types == null) { - throw new IllegalArgumentException("[" + NAME + "] types cannot be null"); + if (out.getVersion().before(Version.V_2_0_0)) { + // types not supported so send an empty array to previous versions + out.writeStringArray(Strings.EMPTY_ARRAY); } - this.types = types; - return this; - } - - /** - * Returns the types used in this query - * - * @deprecated Types are in the process of being removed, prefer to filter on a field instead. - */ - @Deprecated - public String[] types() { - return this.types; + out.writeStringArray(ids.toArray(new String[ids.size()])); } /** @@ -141,9 +121,6 @@ public Set ids() { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - if (types.length > 0) { - builder.array(TYPE_FIELD.getPreferredName(), types); - } builder.startArray(VALUES_FIELD.getPreferredName()); for (String value : ids) { builder.value(value); @@ -156,18 +133,13 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep private static final ObjectParser PARSER = new ObjectParser<>(NAME, IdsQueryBuilder::new); static { - PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::types), IdsQueryBuilder.TYPE_FIELD); PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::addIds), IdsQueryBuilder.VALUES_FIELD); declareStandardFields(PARSER); } public static IdsQueryBuilder fromXContent(XContentParser parser) { try { - IdsQueryBuilder builder = PARSER.apply(parser, 
null); - if (builder.types().length > 0) { - deprecationLogger.deprecate("ids_query_with_types", TYPES_DEPRECATION_MESSAGE); - } - return builder; + return PARSER.apply(parser, null); } catch (IllegalArgumentException e) { throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e); } @@ -193,34 +165,20 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType idField = context.fieldMapper(IdFieldMapper.NAME); + MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME); if (idField == null || ids.isEmpty()) { throw new IllegalStateException("Rewrite first"); } - final DocumentMapper mapper = context.getMapperService().documentMapper(); - Collection typesForQuery; - if (types.length == 0) { - typesForQuery = context.queryTypes(); - } else if (types.length == 1 && Metadata.ALL.equals(types[0])) { - typesForQuery = Collections.singleton(mapper.type()); - } else { - typesForQuery = new HashSet<>(Arrays.asList(types)); - } - - if (typesForQuery.contains(mapper.type())) { - return idField.termsQuery(new ArrayList<>(ids), context); - } else { - return new MatchNoDocsQuery("Type mismatch"); - } + return idField.termsQuery(new ArrayList<>(ids), context); } @Override protected int doHashCode() { - return Objects.hash(ids, Arrays.hashCode(types)); + return Objects.hash(ids); } @Override protected boolean doEquals(IdsQueryBuilder other) { - return Objects.equals(ids, other.ids) && Arrays.equals(types, other.types); + return Objects.equals(ids, other.ids); } } diff --git a/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java b/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java index b3166c39e68c8..e91adc5abe27a 100644 --- a/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java @@ -69,13 +69,20 @@ public 
IntervalBuilder(String field, Analyzer analyzer) { } public IntervalsSource analyzeText(String query, int maxGaps, boolean ordered) throws IOException { + return analyzeText(query, maxGaps, ordered ? IntervalMode.ORDERED : IntervalMode.UNORDERED); + } + + public IntervalsSource analyzeText(String query, int maxGaps, IntervalMode mode) throws IOException { try (TokenStream ts = analyzer.tokenStream(field, query); CachingTokenFilter stream = new CachingTokenFilter(ts)) { - return analyzeText(stream, maxGaps, ordered); + return analyzeText(stream, maxGaps, mode); } } protected IntervalsSource analyzeText(CachingTokenFilter stream, int maxGaps, boolean ordered) throws IOException { + return analyzeText(stream, maxGaps, ordered ? IntervalMode.ORDERED : IntervalMode.UNORDERED); + } + protected IntervalsSource analyzeText(CachingTokenFilter stream, int maxGaps, IntervalMode mode) throws IOException { TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class); PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class); @@ -114,15 +121,15 @@ protected IntervalsSource analyzeText(CachingTokenFilter stream, int maxGaps, bo return analyzeTerm(stream); } else if (isGraph) { // graph - return combineSources(analyzeGraph(stream), maxGaps, ordered); + return combineSources(analyzeGraph(stream), maxGaps, mode); } else { // phrase if (hasSynonyms) { // phrase with single-term synonyms - return analyzeSynonyms(stream, maxGaps, ordered); + return analyzeSynonyms(stream, maxGaps, mode); } else { // simple phrase - return combineSources(analyzeTerms(stream), maxGaps, ordered); + return combineSources(analyzeTerms(stream), maxGaps, mode); } } @@ -135,7 +142,7 @@ protected IntervalsSource analyzeTerm(TokenStream ts) throws IOException { return Intervals.term(BytesRef.deepCopyOf(bytesAtt.getBytesRef())); } - protected static IntervalsSource 
combineSources(List sources, int maxGaps, boolean ordered) { + protected static IntervalsSource combineSources(List sources, int maxGaps, IntervalMode mode) { if (sources.size() == 0) { return NO_INTERVALS; } @@ -143,10 +150,21 @@ protected static IntervalsSource combineSources(List sources, i return sources.get(0); } IntervalsSource[] sourcesArray = sources.toArray(new IntervalsSource[0]); - if (maxGaps == 0 && ordered) { + if (maxGaps == 0 && mode == IntervalMode.ORDERED) { return Intervals.phrase(sourcesArray); } - IntervalsSource inner = ordered ? Intervals.ordered(sourcesArray) : Intervals.unordered(sourcesArray); + IntervalsSource inner; + if (mode == IntervalMode.ORDERED) { + inner = Intervals.ordered(sourcesArray); + } else if (mode == IntervalMode.UNORDERED) { + inner = Intervals.unordered(sourcesArray); + } else { + inner = Intervals.unorderedNoOverlaps(sourcesArray[0], sourcesArray[1]); + for (int sourceIdx = 2; sourceIdx < sourcesArray.length; sourceIdx++) { + inner = Intervals.unorderedNoOverlaps(maxGaps == -1 ? 
inner : Intervals.maxgaps(maxGaps, inner), sourcesArray[sourceIdx]); + } + } + if (maxGaps == -1) { return inner; } @@ -174,7 +192,7 @@ public static IntervalsSource extend(IntervalsSource source, int precedingSpaces return Intervals.extend(source, precedingSpaces, 0); } - protected IntervalsSource analyzeSynonyms(TokenStream ts, int maxGaps, boolean ordered) throws IOException { + protected IntervalsSource analyzeSynonyms(TokenStream ts, int maxGaps, IntervalMode mode) throws IOException { List terms = new ArrayList<>(); List synonyms = new ArrayList<>(); TermToBytesRefAttribute bytesAtt = ts.addAttribute(TermToBytesRefAttribute.class); @@ -199,7 +217,7 @@ protected IntervalsSource analyzeSynonyms(TokenStream ts, int maxGaps, boolean o } else { terms.add(extend(Intervals.or(synonyms.toArray(new IntervalsSource[0])), spaces)); } - return combineSources(terms, maxGaps, ordered); + return combineSources(terms, maxGaps, mode); } protected List analyzeGraph(TokenStream source) throws IOException { @@ -222,7 +240,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept Iterator it = graph.getFiniteStrings(start, end); while (it.hasNext()) { TokenStream ts = it.next(); - IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); + IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, IntervalMode.ORDERED); if (paths.size() >= maxClauseCount) { throw new BooleanQuery.TooManyClauses(); } diff --git a/server/src/main/java/org/opensearch/index/query/IntervalMode.java b/server/src/main/java/org/opensearch/index/query/IntervalMode.java new file mode 100644 index 0000000000000..f0489bf452678 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/query/IntervalMode.java @@ -0,0 +1,59 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.query; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; + +import java.io.IOException; + +public enum IntervalMode implements Writeable { + ORDERED(0), + UNORDERED(1), + UNORDERED_NO_OVERLAP(2); + + private final int ordinal; + + IntervalMode(int ordinal) { + this.ordinal = ordinal; + } + + public static IntervalMode readFromStream(StreamInput in) throws IOException { + int ord = in.readVInt(); + switch (ord) { + case (0): + return ORDERED; + case (1): + return UNORDERED; + case (2): + return UNORDERED_NO_OVERLAP; + } + throw new OpenSearchException("unknown serialized type [" + ord + "]"); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal); + } + + public static IntervalMode fromString(String intervalMode) { + if (intervalMode == null) { + throw new IllegalArgumentException("cannot parse mode from null string"); + } + + for (IntervalMode mode : IntervalMode.values()) { + if (mode.name().equalsIgnoreCase(intervalMode)) { + return mode; + } + } + throw new IllegalArgumentException("no mode can be parsed from ordinal " + intervalMode); + } +} diff --git a/server/src/main/java/org/opensearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/opensearch/index/query/IntervalsSourceProvider.java index 3461a0ebcf3ca..b53dfc32d7006 100644 --- a/server/src/main/java/org/opensearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/opensearch/index/query/IntervalsSourceProvider.java @@ -40,6 +40,7 @@ import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.RegExp; import org.opensearch.LegacyESVersion; import org.opensearch.Version; import 
org.opensearch.common.ParseField; @@ -73,7 +74,7 @@ * Factory class for {@link IntervalsSource} * * Built-in sources include {@link Match}, which analyzes a text string and converts it - * to a proximity source (phrase, ordered or unordered depending on how + * to a proximity source (phrase, ordered, unordered, unordered without overlaps depending on how * strict the matching should be); {@link Combine}, which allows proximity queries * between different sub-sources; and {@link Disjunction}. */ @@ -130,15 +131,15 @@ public static class Match extends IntervalsSourceProvider { private final String query; private final int maxGaps; - private final boolean ordered; + private final IntervalMode mode; private final String analyzer; private final IntervalFilter filter; private final String useField; - public Match(String query, int maxGaps, boolean ordered, String analyzer, IntervalFilter filter, String useField) { + public Match(String query, int maxGaps, IntervalMode mode, String analyzer, IntervalFilter filter, String useField) { this.query = query; this.maxGaps = maxGaps; - this.ordered = ordered; + this.mode = mode; this.analyzer = analyzer; this.filter = filter; this.useField = useField; @@ -147,7 +148,15 @@ public Match(String query, int maxGaps, boolean ordered, String analyzer, Interv public Match(StreamInput in) throws IOException { this.query = in.readString(); this.maxGaps = in.readVInt(); - this.ordered = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { + this.mode = IntervalMode.readFromStream(in); + } else { + if (in.readBoolean()) { + this.mode = IntervalMode.ORDERED; + } else { + this.mode = IntervalMode.UNORDERED; + } + } this.analyzer = in.readOptionalString(); this.filter = in.readOptionalWriteable(IntervalFilter::new); if (in.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) { @@ -167,9 +176,9 @@ public IntervalsSource getSource(QueryShardContext context, MappedFieldType fiel if (useField != null) { fieldType = 
context.fieldMapper(useField); assert fieldType != null; - source = Intervals.fixField(useField, fieldType.intervals(query, maxGaps, ordered, analyzer, false)); + source = Intervals.fixField(useField, fieldType.intervals(query, maxGaps, mode, analyzer, false)); } else { - source = fieldType.intervals(query, maxGaps, ordered, analyzer, false); + source = fieldType.intervals(query, maxGaps, mode, analyzer, false); } if (filter != null) { return filter.filter(source, context, fieldType); @@ -190,7 +199,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Match match = (Match) o; return maxGaps == match.maxGaps - && ordered == match.ordered + && mode == match.mode && Objects.equals(query, match.query) && Objects.equals(filter, match.filter) && Objects.equals(useField, match.useField) @@ -199,7 +208,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(query, maxGaps, ordered, analyzer, filter, useField); + return Objects.hash(query, maxGaps, mode, analyzer, filter, useField); } @Override @@ -211,7 +220,11 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { out.writeString(query); out.writeVInt(maxGaps); - out.writeBoolean(ordered); + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { + mode.writeTo(out); + } else { + out.writeBoolean(mode == IntervalMode.ORDERED); + } out.writeOptionalString(analyzer); out.writeOptionalWriteable(filter); if (out.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) { @@ -225,7 +238,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("query", query); builder.field("max_gaps", maxGaps); - builder.field("ordered", ordered); + builder.field("mode", mode); if (analyzer != null) { builder.field("analyzer", analyzer); } @@ -241,16 +254,28 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private static final 
ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { String query = (String) args[0]; int max_gaps = (args[1] == null ? -1 : (Integer) args[1]); - boolean ordered = (args[2] != null && (boolean) args[2]); - String analyzer = (String) args[3]; - IntervalFilter filter = (IntervalFilter) args[4]; - String useField = (String) args[5]; - return new Match(query, max_gaps, ordered, analyzer, filter, useField); + Boolean ordered = (Boolean) args[2]; + String mode = (String) args[3]; + String analyzer = (String) args[4]; + IntervalFilter filter = (IntervalFilter) args[5]; + String useField = (String) args[6]; + + IntervalMode intervalMode; + if (ordered != null) { + intervalMode = ordered ? IntervalMode.ORDERED : IntervalMode.UNORDERED; + } else if (mode != null) { + intervalMode = IntervalMode.fromString(mode); + } else { + intervalMode = IntervalMode.UNORDERED; + } + + return new Match(query, max_gaps, intervalMode, analyzer, filter, useField); }); static { PARSER.declareString(constructorArg(), new ParseField("query")); PARSER.declareInt(optionalConstructorArg(), new ParseField("max_gaps")); - PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered").withAllDeprecated()); + PARSER.declareString(optionalConstructorArg(), new ParseField("mode")); PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); PARSER.declareObject(optionalConstructorArg(), (p, c) -> IntervalFilter.fromXContent(p), new ParseField("filter")); PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); @@ -268,8 +293,8 @@ int getMaxGaps() { return maxGaps; } - boolean isOrdered() { - return ordered; + IntervalMode getMode() { + return mode; } String getAnalyzer() { @@ -395,19 +420,23 @@ public static class Combine extends IntervalsSourceProvider { public static final String NAME = "all_of"; private final List subSources; - private final boolean 
ordered; + private final IntervalMode mode; private final int maxGaps; private final IntervalFilter filter; - public Combine(List subSources, boolean ordered, int maxGaps, IntervalFilter filter) { + public Combine(List subSources, IntervalMode mode, int maxGaps, IntervalFilter filter) { this.subSources = subSources; - this.ordered = ordered; + this.mode = mode; this.maxGaps = maxGaps; this.filter = filter; } public Combine(StreamInput in) throws IOException { - this.ordered = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { + this.mode = IntervalMode.readFromStream(in); + } else { + this.mode = in.readBoolean() ? IntervalMode.ORDERED : IntervalMode.UNORDERED; + } this.subSources = in.readNamedWriteableList(IntervalsSourceProvider.class); this.maxGaps = in.readInt(); this.filter = in.readOptionalWriteable(IntervalFilter::new); @@ -419,7 +448,7 @@ public IntervalsSource getSource(QueryShardContext ctx, MappedFieldType fieldTyp for (IntervalsSourceProvider provider : subSources) { ss.add(provider.getSource(ctx, fieldType)); } - IntervalsSource source = IntervalBuilder.combineSources(ss, maxGaps, ordered); + IntervalsSource source = IntervalBuilder.combineSources(ss, maxGaps, mode); if (filter != null) { return filter.filter(source, ctx, fieldType); } @@ -439,14 +468,14 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Combine combine = (Combine) o; return Objects.equals(subSources, combine.subSources) - && ordered == combine.ordered + && mode == combine.mode && maxGaps == combine.maxGaps && Objects.equals(filter, combine.filter); } @Override public int hashCode() { - return Objects.hash(subSources, ordered, maxGaps, filter); + return Objects.hash(subSources, mode, maxGaps, filter); } @Override @@ -456,7 +485,11 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(ordered); + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { + 
mode.writeTo(out); + } else { + out.writeBoolean(mode == IntervalMode.ORDERED); + } out.writeNamedWriteableList(subSources); out.writeInt(maxGaps); out.writeOptionalWriteable(filter); @@ -465,7 +498,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - builder.field("ordered", ordered); + builder.field("mode", mode); builder.field("max_gaps", maxGaps); builder.startArray("intervals"); for (IntervalsSourceProvider provider : subSources) { @@ -482,14 +515,26 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @SuppressWarnings("unchecked") static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { - boolean ordered = (args[0] != null && (boolean) args[0]); - List subSources = (List) args[1]; - Integer maxGaps = (args[2] == null ? -1 : (Integer) args[2]); - IntervalFilter filter = (IntervalFilter) args[3]; - return new Combine(subSources, ordered, maxGaps, filter); + Boolean ordered = (Boolean) args[0]; + String mode = (String) args[1]; + List subSources = (List) args[2]; + Integer maxGaps = (args[3] == null ? -1 : (Integer) args[3]); + IntervalFilter filter = (IntervalFilter) args[4]; + + IntervalMode intervalMode; + if (ordered != null) { + intervalMode = ordered ? 
IntervalMode.ORDERED : IntervalMode.UNORDERED; + } else if (mode != null) { + intervalMode = IntervalMode.fromString(mode); + } else { + intervalMode = IntervalMode.UNORDERED; + } + + return new Combine(subSources, intervalMode, maxGaps, filter); }); static { - PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("ordered").withAllDeprecated()); + PARSER.declareString(optionalConstructorArg(), new ParseField("mode")); PARSER.declareObjectArray( constructorArg(), (p, c) -> IntervalsSourceProvider.parseInnerIntervals(p), @@ -507,8 +552,8 @@ List getSubSources() { return subSources; } - boolean isOrdered() { - return ordered; + IntervalMode getMode() { + return mode; } int getMaxGaps() { @@ -550,9 +595,9 @@ public IntervalsSource getSource(QueryShardContext context, MappedFieldType fiel if (useField != null) { fieldType = context.fieldMapper(useField); assert fieldType != null; - source = Intervals.fixField(useField, fieldType.intervals(prefix, 0, false, analyzer, true)); + source = Intervals.fixField(useField, fieldType.intervals(prefix, 0, IntervalMode.UNORDERED, analyzer, true)); } else { - source = fieldType.intervals(prefix, 0, false, analyzer, true); + source = fieldType.intervals(prefix, 0, IntervalMode.UNORDERED, analyzer, true); } return source; } @@ -643,12 +688,20 @@ public static class Regexp extends IntervalsSourceProvider { private final int flags; private final String useField; private final Integer maxExpansions; - - public Regexp(String pattern, int flags, String useField, Integer maxExpansions) { + private final boolean caseInsensitive; + + /** + * Constructor + * + * {@code flags} is Lucene's syntax flags + * and {@code caseInsensitive} enables Lucene's only matching flag. 
+ */ + public Regexp(String pattern, int flags, String useField, Integer maxExpansions, boolean caseInsensitive) { this.pattern = pattern; this.flags = flags; this.useField = useField; this.maxExpansions = (maxExpansions != null && maxExpansions > 0) ? maxExpansions : null; + this.caseInsensitive = caseInsensitive; } public Regexp(StreamInput in) throws IOException { @@ -656,11 +709,20 @@ public Regexp(StreamInput in) throws IOException { this.flags = in.readVInt(); this.useField = in.readOptionalString(); this.maxExpansions = in.readOptionalVInt(); + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { + this.caseInsensitive = in.readBoolean(); + } else { + this.caseInsensitive = false; + } } @Override public IntervalsSource getSource(QueryShardContext context, MappedFieldType fieldType) { - final org.apache.lucene.util.automaton.RegExp regexp = new org.apache.lucene.util.automaton.RegExp(pattern, flags); + final org.apache.lucene.util.automaton.RegExp regexp = new org.apache.lucene.util.automaton.RegExp( + pattern, + flags, + caseInsensitive ? 
RegExp.ASCII_CASE_INSENSITIVE : 0 + ); final CompiledAutomaton automaton = new CompiledAutomaton(regexp.toAutomaton()); if (useField != null) { @@ -701,12 +763,13 @@ public boolean equals(Object o) { return Objects.equals(pattern, regexp.pattern) && Objects.equals(flags, regexp.flags) && Objects.equals(useField, regexp.useField) - && Objects.equals(maxExpansions, regexp.maxExpansions); + && Objects.equals(maxExpansions, regexp.maxExpansions) + && Objects.equals(caseInsensitive, regexp.caseInsensitive); } @Override public int hashCode() { - return Objects.hash(pattern, flags, useField, maxExpansions); + return Objects.hash(pattern, flags, useField, maxExpansions, caseInsensitive); } @Override @@ -720,6 +783,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(flags); out.writeOptionalString(useField); out.writeOptionalVInt(maxExpansions); + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { + out.writeBoolean(caseInsensitive); + } } @Override @@ -735,6 +801,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (maxExpansions != null) { builder.field("max_expansions", maxExpansions); } + if (caseInsensitive) { + builder.field("case_insensitive", caseInsensitive); + } builder.endObject(); return builder; } @@ -745,13 +814,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws Integer flagsValue = (Integer) args[2]; String useField = (String) args[3]; Integer maxExpansions = (Integer) args[4]; + boolean caseInsensitive = args[5] != null && (boolean) args[5]; if (flagsValue != null) { - return new Regexp(pattern, flagsValue, useField, maxExpansions); + return new Regexp(pattern, flagsValue, useField, maxExpansions, caseInsensitive); } else if (flags != null) { - return new Regexp(pattern, RegexpFlag.resolveValue(flags), useField, maxExpansions); + return new Regexp(pattern, RegexpFlag.resolveValue(flags), useField, maxExpansions, caseInsensitive); } else { - return new 
Regexp(pattern, DEFAULT_FLAGS_VALUE, useField, maxExpansions); + return new Regexp(pattern, DEFAULT_FLAGS_VALUE, useField, maxExpansions, caseInsensitive); } }); static { @@ -760,6 +830,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws PARSER.declareInt(optionalConstructorArg(), new ParseField("flags_value")); PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); PARSER.declareInt(optionalConstructorArg(), new ParseField("max_expansions")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("case_insensitive")); } public static Regexp fromXContent(XContentParser parser) throws IOException { @@ -781,6 +852,10 @@ String getUseField() { Integer getMaxExpansions() { return maxExpansions; } + + boolean isCaseInsensitive() { + return caseInsensitive; + } } public static class Wildcard extends IntervalsSourceProvider { diff --git a/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java index a72fc8efb2284..1ecc88387e549 100644 --- a/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java @@ -39,6 +39,7 @@ import org.apache.lucene.search.Query; import org.opensearch.OpenSearchParseException; import org.opensearch.ExceptionsHelper; +import org.opensearch.Version; import org.opensearch.action.RoutingMissingException; import org.opensearch.action.termvectors.MultiTermVectorsItemResponse; import org.opensearch.action.termvectors.MultiTermVectorsRequest; @@ -54,7 +55,6 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.lucene.search.MoreLikeThisQuery; import org.opensearch.common.lucene.search.XMoreLikeThis; 
import org.opensearch.common.lucene.uid.Versions; @@ -67,7 +67,6 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -81,7 +80,6 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.stream.Stream; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -92,7 +90,6 @@ */ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "more_like_this"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MoreLikeThisQueryBuilder.class); static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [more_like_this] " + "queries. The type should no longer be specified in the [like] and [unlike] sections."; @@ -128,7 +125,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder item.type == null); - } - private static void parseLikeField(XContentParser parser, List texts, List items) throws IOException { if (parser.currentToken().isValue()) { texts.add(parser.text()); @@ -1150,9 +1081,6 @@ private static void setDefaultIndexTypeFields( if (item.index() == null) { item.index(context.index().getName()); } - if (item.type() == null) { - item.type(MapperService.SINGLE_MAPPING_NAME); - } // default fields if not present but don't override for artificial docs if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) { if (useDefaultField) { diff --git a/server/src/main/java/org/opensearch/index/query/QueryBuilders.java b/server/src/main/java/org/opensearch/index/query/QueryBuilders.java index 90a3cbbb54a63..7ea12fdc6406b 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryBuilders.java +++ 
b/server/src/main/java/org/opensearch/index/query/QueryBuilders.java @@ -33,6 +33,7 @@ package org.opensearch.index.query; import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.ShapeRelation; @@ -153,18 +154,6 @@ public static IdsQueryBuilder idsQuery() { return new IdsQueryBuilder(); } - /** - * Constructs a query that will match only specific ids within types. - * - * @param types The mapping/doc type - * - * @deprecated Types are in the process of being removed, use {@link #idsQuery()} instead. - */ - @Deprecated - public static IdsQueryBuilder idsQuery(String... types) { - return new IdsQueryBuilder().types(types); - } - /** * A Query that matches documents containing a term. * @@ -464,7 +453,17 @@ public static FunctionScoreQueryBuilder functionScoreQuery(FunctionScoreQueryBui * @param function The function builder used to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function) { - return new FunctionScoreQueryBuilder(function); + return functionScoreQuery(function, null); + } + + /** + * A query that allows to define a custom scoring function. + * + * @param function The function builder used to custom score + * @param queryName The query name + */ + public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function, @Nullable String queryName) { + return new FunctionScoreQueryBuilder(function, queryName); } /** @@ -622,15 +621,6 @@ public static WrapperQueryBuilder wrapperQuery(byte[] source) { return new WrapperQueryBuilder(source); } - /** - * A filter based on doc/mapping type. - * @deprecated Types are going away, prefer filtering on a field. 
- */ - @Deprecated - public static TypeQueryBuilder typeQuery(String type) { - return new TypeQueryBuilder(type); - } - /** * A terms query that can extract the terms from another doc in an index. */ @@ -696,14 +686,6 @@ public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShap return new GeoShapeQueryBuilder(name, indexedShapeId); } - /** - * @deprecated Types are in the process of being removed, use {@link #geoShapeQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) { - return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType); - } - /** * A filter to filter indexed shapes intersecting with shapes * @@ -732,16 +714,6 @@ public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String inde return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoIntersectionQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.INTERSECTS); - return builder; - } - /** * A filter to filter indexed shapes that are contained by a shape * @@ -770,16 +742,6 @@ public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedSha return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoWithinQuery(String, String)} instead. 
- */ - @Deprecated - public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.WITHIN); - return builder; - } - /** * A filter to filter indexed shapes that are not intersection with the query shape * @@ -808,16 +770,6 @@ public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedS return builder; } - /** - * @deprecated Types are in the process of being removed, use {@link #geoDisjointQuery(String, String)} instead. - */ - @Deprecated - public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId, String indexedShapeType) { - GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); - builder.relation(ShapeRelation.DISJOINT); - return builder; - } - /** * A filter to filter only documents where a field exists in them. * diff --git a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java index ad1f02ce0265d..720ee077119d6 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java @@ -52,6 +52,7 @@ public class QueryRewriteContext { protected final Client client; protected final LongSupplier nowInMillis; private final List>> asyncActions = new ArrayList<>(); + private final boolean validate; public QueryRewriteContext( NamedXContentRegistry xContentRegistry, @@ -59,11 +60,22 @@ public QueryRewriteContext( Client client, LongSupplier nowInMillis ) { + this(xContentRegistry, writeableRegistry, client, nowInMillis, false); + } + + public QueryRewriteContext( + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry writeableRegistry, + Client client, + LongSupplier nowInMillis, + boolean validate + ) { 
this.xContentRegistry = xContentRegistry; this.writeableRegistry = writeableRegistry; this.client = client; this.nowInMillis = nowInMillis; + this.validate = validate; } /** @@ -140,4 +152,7 @@ public void onFailure(Exception e) { } } + public boolean validate() { + return validate; + } } diff --git a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java index ab0ae487a393d..bfc0490e507db 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java @@ -44,7 +44,6 @@ import org.opensearch.client.Client; import org.opensearch.common.CheckedFunction; import org.opensearch.common.ParsingException; -import org.opensearch.common.Strings; import org.opensearch.common.TriFunction; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.lucene.search.Queries; @@ -76,9 +75,6 @@ import org.opensearch.transport.RemoteClusterAware; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -105,7 +101,6 @@ public class QueryShardContext extends QueryRewriteContext { private final TriFunction, IndexFieldData> indexFieldDataService; private final int shardId; private final IndexSearcher searcher; - private String[] types = Strings.EMPTY_ARRAY; private boolean cacheable = true; private final SetOnce frozen = new SetOnce<>(); @@ -113,14 +108,6 @@ public class QueryShardContext extends QueryRewriteContext { private final Predicate indexNameMatcher; private final BooleanSupplier allowExpensiveQueries; - public void setTypes(String... 
types) { - this.types = types; - } - - public String[] getTypes() { - return types; - } - private final Map namedQueries = new HashMap<>(); private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; @@ -145,6 +132,48 @@ public QueryShardContext( Predicate indexNameMatcher, BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry + ) { + this( + shardId, + indexSettings, + bigArrays, + bitsetFilterCache, + indexFieldDataLookup, + mapperService, + similarityService, + scriptService, + xContentRegistry, + namedWriteableRegistry, + client, + searcher, + nowInMillis, + clusterAlias, + indexNameMatcher, + allowExpensiveQueries, + valuesSourceRegistry, + false + ); + } + + public QueryShardContext( + int shardId, + IndexSettings indexSettings, + BigArrays bigArrays, + BitsetFilterCache bitsetFilterCache, + TriFunction, IndexFieldData> indexFieldDataLookup, + MapperService mapperService, + SimilarityService similarityService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, + Client client, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + Predicate indexNameMatcher, + BooleanSupplier allowExpensiveQueries, + ValuesSourceRegistry valuesSourceRegistry, + boolean validate ) { this( shardId, @@ -166,7 +195,8 @@ public QueryShardContext( indexSettings.getIndex().getUUID() ), allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } @@ -188,7 +218,8 @@ public QueryShardContext(QueryShardContext source) { source.indexNameMatcher, source.fullyQualifiedIndex, source.allowExpensiveQueries, - source.valuesSourceRegistry + source.valuesSourceRegistry, + source.validate() ); } @@ -209,9 +240,10 @@ private QueryShardContext( Predicate indexNameMatcher, Index fullyQualifiedIndex, BooleanSupplier allowExpensiveQueries, - ValuesSourceRegistry valuesSourceRegistry + ValuesSourceRegistry valuesSourceRegistry, + boolean 
validate ) { - super(xContentRegistry, namedWriteableRegistry, client, nowInMillis); + super(xContentRegistry, namedWriteableRegistry, client, nowInMillis, validate); this.shardId = shardId; this.similarityService = similarityService; this.mapperService = mapperService; @@ -325,11 +357,11 @@ public Set sourcePath(String fullName) { } /** - * Returns s {@link DocumentMapper} instance for the given type. - * Delegates to {@link MapperService#documentMapper(String)} + * Returns s {@link DocumentMapper} instance. + * Delegates to {@link MapperService#documentMapper()} */ public DocumentMapper documentMapper(String type) { - return mapperService.documentMapper(type); + return mapperService.documentMapper(); } /** @@ -377,18 +409,6 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap } } - /** - * Returns the narrowed down explicit types, or, if not set, all types. - */ - public Collection queryTypes() { - String[] types = getTypes(); - if (types == null || types.length == 0 || (types.length == 1 && types[0].equals("_all"))) { - DocumentMapper mapper = getMapperService().documentMapper(); - return mapper == null ? 
Collections.emptyList() : Collections.singleton(mapper.type()); - } - return Arrays.asList(types); - } - private SearchLookup lookup = null; /** @@ -398,8 +418,7 @@ public SearchLookup lookup() { if (this.lookup == null) { this.lookup = new SearchLookup( getMapperService(), - (fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup), - types + (fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup) ); } return this.lookup; @@ -415,8 +434,7 @@ public SearchLookup newFetchLookup() { */ return new SearchLookup( getMapperService(), - (fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup), - types + (fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup) ); } diff --git a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java index 1c27946514a3d..80b792d750546 100644 --- a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java @@ -452,7 +452,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC } DateMathParser dateMathParser = getForceDateParser(); - return fieldType.isFieldWithinQuery( + final MappedFieldType.Relation relation = fieldType.isFieldWithinQuery( shardContext.getIndexReader(), from, to, @@ -462,6 +462,13 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC dateMathParser, queryRewriteContext ); + + // For validation, always assume that there is an intersection + if (relation == MappedFieldType.Relation.DISJOINT && shardContext.validate()) { + return MappedFieldType.Relation.INTERSECTS; + } + + return relation; } // Not on the shard, we have no way to know what the relation is. 
diff --git a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java index 881323b05e536..8739e48eb411b 100644 --- a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java @@ -43,9 +43,11 @@ import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.OpenSearchException; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.lucene.search.function.Functions; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.script.FilterScript; @@ -153,17 +155,19 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } FilterScript.Factory factory = context.compile(script, FilterScript.CONTEXT); FilterScript.LeafFactory filterScript = factory.newFactory(script.getParams(), context.lookup()); - return new ScriptQuery(script, filterScript); + return new ScriptQuery(script, filterScript, queryName); } static class ScriptQuery extends Query { final Script script; final FilterScript.LeafFactory filterScript; + final String queryName; - ScriptQuery(Script script, FilterScript.LeafFactory filterScript) { + ScriptQuery(Script script, FilterScript.LeafFactory filterScript, @Nullable String queryName) { this.script = script; this.filterScript = filterScript; + this.queryName = queryName; } @Override @@ -171,6 +175,7 @@ public String toString(String field) { StringBuilder buffer = new StringBuilder(); buffer.append("ScriptQuery("); buffer.append(script); + buffer.append(Functions.nameOrEmptyArg(queryName)); buffer.append(")"); return buffer.toString(); } diff --git 
a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java index 9e0446ae4d802..ac29cb2cf5201 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java @@ -225,10 +225,6 @@ public TermsLookup termsLookup() { return this.termsLookup; } - public boolean isTypeless() { - return termsLookup == null || termsLookup.type() == null; - } - private static final Set> INTEGER_TYPES = new HashSet<>( Arrays.asList(Byte.class, Short.class, Integer.class, Long.class) ); @@ -479,9 +475,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } private void fetch(TermsLookup termsLookup, Client client, ActionListener> actionListener) { - GetRequest getRequest = termsLookup.type() == null - ? new GetRequest(termsLookup.index(), termsLookup.id()) - : new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()); + GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.id()); getRequest.preference("_local").routing(termsLookup.routing()); client.get(getRequest, ActionListener.delegateFailure(actionListener, (delegatedListener, getResponse) -> { List terms = new ArrayList<>(); diff --git a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java deleted file mode 100644 index f8732586ec50a..0000000000000 --- a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.ParseField; -import org.opensearch.common.ParsingException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.index.mapper.DocumentMapper; - -import java.io.IOException; -import java.util.Objects; - -public class TypeQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "type"; - - private static final ParseField VALUE_FIELD = new ParseField("value"); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeQueryBuilder.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Type queries are deprecated, " - + "prefer to filter on a field instead."; - - private final String type; - - public TypeQueryBuilder(String type) { - if (type 
== null) { - throw new IllegalArgumentException("[type] cannot be null"); - } - this.type = type; - } - - /** - * Read from a stream. - */ - public TypeQueryBuilder(StreamInput in) throws IOException { - super(in); - type = in.readString(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(type); - } - - public String type() { - return type; - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(VALUE_FIELD.getPreferredName(), type); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static TypeQueryBuilder fromXContent(XContentParser parser) throws IOException { - String type = null; - String queryName = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - queryName = parser.text(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - boost = parser.floatValue(); - } else if (VALUE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - type = parser.text(); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } - - if (type == null) { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter needs to be provided with a value for the type" - ); - } - 
return new TypeQueryBuilder(type).boost(boost).queryName(queryName); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE); - // LUCENE 4 UPGRADE document mapper should use bytesref as well? - DocumentMapper documentMapper = context.getMapperService().documentMapper(type); - if (documentMapper == null) { - // no type means no documents - return new MatchNoDocsQuery(); - } else { - return Queries.newNonNestedFilter(context.indexVersionCreated()); - } - } - - @Override - protected int doHashCode() { - return Objects.hash(type); - } - - @Override - protected boolean doEquals(TypeQueryBuilder other) { - return Objects.equals(type, other.type); - } -} diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java index 8a595dda07979..02d01ef470b61 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunction.java @@ -33,6 +33,7 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; /** * Implement this interface to provide a decay function that is executed on a @@ -45,7 +46,7 @@ public interface DecayFunction { double evaluate(double value, double scale); - Explanation explainFunction(String valueString, double value, double scale); + Explanation explainFunction(String valueString, double value, double scale, @Nullable String functionName); /** * The final scale parameter is computed from the scale parameter given by diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java 
b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java index 3ddacb1305536..0ee61b34cd279 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.opensearch.OpenSearchParseException; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.geo.GeoDistance; @@ -93,10 +94,31 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob this(fieldName, origin, scale, offset, DEFAULT_DECAY); } + /** + * Convenience constructor that converts its parameters into json to parse on the data nodes. + */ + protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + this(fieldName, origin, scale, offset, DEFAULT_DECAY, functionName); + } + /** * Convenience constructor that converts its parameters into json to parse on the data nodes. */ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { + this(fieldName, origin, scale, offset, decay, null); + } + + /** + * Convenience constructor that converts its parameters into json to parse on the data nodes. 
+ */ + protected DecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { if (fieldName == null) { throw new IllegalArgumentException("decay function: field name must not be null"); } @@ -123,6 +145,7 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob } catch (IOException e) { throw new IllegalArgumentException("unable to build inner function object", e); } + setFunctionName(functionName); } protected DecayFunctionBuilder(String fieldName, BytesReference functionBytes) { @@ -285,7 +308,16 @@ private AbstractDistanceScoreFunction parseNumberVariable( ); } IndexNumericFieldData numericFieldData = context.getForField(fieldType); - return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + return new NumericFieldDataScoreFunction( + origin, + scale, + decay, + offset, + getDecayFunction(), + numericFieldData, + mode, + getFunctionName() + ); } private AbstractDistanceScoreFunction parseGeoVariable( @@ -325,7 +357,7 @@ private AbstractDistanceScoreFunction parseGeoVariable( double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); + return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode, getFunctionName()); } @@ -375,7 +407,16 @@ private AbstractDistanceScoreFunction parseDateVariable( val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType); - return new 
NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + return new NumericFieldDataScoreFunction( + origin, + scale, + decay, + offset, + getDecayFunction(), + numericFieldData, + mode, + getFunctionName() + ); } static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { @@ -392,9 +433,10 @@ static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { double offset, DecayFunction func, IndexGeoPointFieldData fieldData, - MultiValueMode mode + MultiValueMode mode, + @Nullable String functionName ) { - super(scale, decay, offset, func, mode); + super(scale, decay, offset, func, mode, functionName); this.origin = origin; this.fieldData = fieldData; } @@ -485,9 +527,10 @@ static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction double offset, DecayFunction func, IndexNumericFieldData fieldData, - MultiValueMode mode + MultiValueMode mode, + @Nullable String functionName ) { - super(scale, decay, offset, func, mode); + super(scale, decay, offset, func, mode, functionName); this.fieldData = fieldData; this.origin = origin; } @@ -569,13 +612,15 @@ public abstract static class AbstractDistanceScoreFunction extends ScoreFunction protected final double offset; private final DecayFunction func; protected final MultiValueMode mode; + protected final String functionName; public AbstractDistanceScoreFunction( double userSuppiedScale, double decay, double offset, DecayFunction func, - MultiValueMode mode + MultiValueMode mode, + @Nullable String functionName ) { super(CombineFunction.MULTIPLY); this.mode = mode; @@ -591,6 +636,7 @@ public AbstractDistanceScoreFunction( throw new IllegalArgumentException(FunctionScoreQueryBuilder.NAME + " : offset must be > 0.0"); } this.offset = offset; + this.functionName = functionName; } /** @@ -624,7 +670,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE return Explanation.match( (float) 
score(docId, subQueryScore.getValue().floatValue()), "Function for field " + getFieldName() + ":", - func.explainFunction(getDistanceString(ctx, docId), value, scale) + func.explainFunction(getDistanceString(ctx, docId), value, scale, functionName) ); } }; diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java index 7f0a9c3a58d59..b78e75762fe11 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ExponentialDecayFunctionBuilder.java @@ -33,8 +33,10 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -45,6 +47,10 @@ public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder scoreFunctionBuilder) { - this(new MatchAllQueryBuilder(), new FilterFunctionBuilder[] { new FilterFunctionBuilder(scoreFunctionBuilder) }); + this(scoreFunctionBuilder, null); + } + + /** + * Creates a function_score query that will execute the function provided on all documents + * + * @param scoreFunctionBuilder score function that is executed + * @param queryName the query name + */ + public FunctionScoreQueryBuilder(ScoreFunctionBuilder scoreFunctionBuilder, @Nullable String queryName) { + this( + new MatchAllQueryBuilder().queryName(queryName), + new FilterFunctionBuilder[] { new FilterFunctionBuilder(scoreFunctionBuilder) } + ); } /** @@ -316,15 +340,17 @@ protected Query doToQuery(QueryShardContext context) throws IOException { int i = 0; for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { 
ScoreFunction scoreFunction = filterFunctionBuilder.getScoreFunction().toFunction(context); - if (filterFunctionBuilder.getFilter().getName().equals(MatchAllQueryBuilder.NAME)) { + final QueryBuilder builder = filterFunctionBuilder.getFilter(); + if (builder.getName().equals(MatchAllQueryBuilder.NAME)) { filterFunctions[i++] = scoreFunction; } else { - Query filter = filterFunctionBuilder.getFilter().toQuery(context); - filterFunctions[i++] = new FunctionScoreQuery.FilterScoreFunction(filter, scoreFunction); + Query filter = builder.toQuery(context); + filterFunctions[i++] = new FunctionScoreQuery.FilterScoreFunction(filter, scoreFunction, builder.queryName()); } } - Query query = this.query.toQuery(context); + final QueryBuilder builder = this.query; + Query query = builder.toQuery(context); if (query == null) { query = new MatchAllDocsQuery(); } @@ -332,12 +358,12 @@ protected Query doToQuery(QueryShardContext context) throws IOException { CombineFunction boostMode = this.boostMode == null ? DEFAULT_BOOST_MODE : this.boostMode; // handle cases where only one score function and no filter was provided. In this case we create a FunctionScoreQuery. 
if (filterFunctions.length == 0) { - return new FunctionScoreQuery(query, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), minScore, maxBoost); } else if (filterFunctions.length == 1 && filterFunctions[0] instanceof FunctionScoreQuery.FilterScoreFunction == false) { - return new FunctionScoreQuery(query, filterFunctions[0], boostMode, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), filterFunctions[0], boostMode, minScore, maxBoost); } // in all other cases we create a FunctionScoreQuery with filters - return new FunctionScoreQuery(query, scoreMode, filterFunctions, boostMode, minScore, maxBoost); + return new FunctionScoreQuery(query, builder.queryName(), scoreMode, filterFunctions, boostMode, minScore, maxBoost); } /** @@ -606,6 +632,7 @@ private static String parseFiltersAndFunctions( QueryBuilder filter = null; ScoreFunctionBuilder scoreFunction = null; Float functionWeight = null; + String functionName = null; if (token != XContentParser.Token.START_OBJECT) { throw new ParsingException( parser.getTokenLocation(), @@ -635,6 +662,8 @@ private static String parseFiltersAndFunctions( } else if (token.isValue()) { if (WEIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { functionWeight = parser.floatValue(); + } else if (NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + functionName = parser.text(); } else { throw new ParsingException( parser.getTokenLocation(), @@ -652,6 +681,10 @@ private static String parseFiltersAndFunctions( scoreFunction.setWeight(functionWeight); } } + + if (functionName != null && scoreFunction != null) { + scoreFunction.setFunctionName(functionName); + } } if (filter == null) { filter = new MatchAllQueryBuilder(); diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java index 
c208083da08f5..ac6ae33cb4ed0 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/GaussDecayFunctionBuilder.java @@ -33,9 +33,11 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -49,10 +51,25 @@ public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, super(fieldName, origin, scale, offset); } + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + super(fieldName, origin, scale, offset, functionName); + } + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { super(fieldName, origin, scale, offset, decay); } + public GaussDecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + super(fieldName, origin, scale, offset, decay, functionName); + } + GaussDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { super(fieldName, functionBytes); } @@ -75,7 +92,6 @@ public DecayFunction getDecayFunction() { } private static final class GaussScoreFunction implements DecayFunction { - @Override public double evaluate(double value, double scale) { // note that we already computed scale^2 in processScale() so we do @@ -84,8 +100,11 @@ public double evaluate(double value, double scale) { } @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match((float) evaluate(value, scale), "exp(-0.5*pow(" + valueExpl + ",2.0)/" + 
-1 * scale + ")"); + public Explanation explainFunction(String valueExpl, double value, double scale, @Nullable String functionName) { + return Explanation.match( + (float) evaluate(value, scale), + "exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + Functions.nameOrEmptyArg(functionName) + ")" + ); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java index 762757eb156e4..03102e45a41ba 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/LinearDecayFunctionBuilder.java @@ -33,8 +33,10 @@ package org.opensearch.index.query.functionscore; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.lucene.search.function.Functions; import java.io.IOException; @@ -47,10 +49,25 @@ public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, super(fieldName, origin, scale, offset); } + public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, @Nullable String functionName) { + super(fieldName, origin, scale, offset, functionName); + } + public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { super(fieldName, origin, scale, offset, decay); } + public LinearDecayFunctionBuilder( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + super(fieldName, origin, scale, offset, decay, functionName); + } + LinearDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { super(fieldName, functionBytes); } @@ -80,8 +97,11 @@ public double 
evaluate(double value, double scale) { } @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match((float) evaluate(value, scale), "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")"); + public Explanation explainFunction(String valueExpl, double value, double scale, @Nullable String functionName) { + return Explanation.match( + (float) evaluate(value, scale), + "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + Functions.nameOrEmptyArg(functionName) + ")" + ); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java index 730be404feb14..26495c93082ae 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/RandomScoreFunctionBuilder.java @@ -31,6 +31,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -58,6 +59,10 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder> implements ToXContentFragment, NamedWriteable { private Float weight; + private String functionName; /** * Standard empty constructor. 
@@ -58,11 +60,17 @@ public ScoreFunctionBuilder() {} */ public ScoreFunctionBuilder(StreamInput in) throws IOException { weight = checkWeight(in.readOptionalFloat()); + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { + functionName = in.readOptionalString(); + } } @Override public final void writeTo(StreamOutput out) throws IOException { out.writeOptionalFloat(weight); + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { + out.writeOptionalString(functionName); + } doWriteTo(out); } @@ -99,11 +107,30 @@ public final Float getWeight() { return weight; } + /** + * The name of this function + */ + public String getFunctionName() { + return functionName; + } + + /** + * Set the name of this function + */ + public void setFunctionName(String functionName) { + this.functionName = functionName; + } + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (weight != null) { builder.field(FunctionScoreQueryBuilder.WEIGHT_FIELD.getPreferredName(), weight); } + + if (functionName != null) { + builder.field(FunctionScoreQueryBuilder.NAME_FIELD.getPreferredName(), functionName); + } + doXContent(builder, params); return builder; } @@ -128,7 +155,7 @@ public final boolean equals(Object obj) { } @SuppressWarnings("unchecked") FB other = (FB) obj; - return Objects.equals(weight, other.getWeight()) && doEquals(other); + return Objects.equals(weight, other.getWeight()) && Objects.equals(functionName, other.getFunctionName()) && doEquals(other); } /** @@ -139,7 +166,7 @@ public final boolean equals(Object obj) { @Override public final int hashCode() { - return Objects.hash(getClass(), weight, doHashCode()); + return Objects.hash(getClass(), weight, functionName, doHashCode()); } /** @@ -156,7 +183,7 @@ public final ScoreFunction toFunction(QueryShardContext context) throws IOExcept if (weight == null) { return scoreFunction; } - return new WeightFactorFunction(weight, scoreFunction); + return new 
WeightFactorFunction(weight, scoreFunction, getFunctionName()); } /** diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java index 54dca40208c00..59d02e9381d7e 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -32,6 +32,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -46,10 +47,29 @@ public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fi return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, null); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale, Object offset) { return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, functionName); + } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( String fieldName, Object origin, @@ -60,10 +80,30 @@ public static ExponentialDecayFunctionBuilder exponentialDecayFunction( return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static ExponentialDecayFunctionBuilder exponentialDecayFunction( + String fieldName, + 
Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale) { return new GaussDecayFunctionBuilder(fieldName, origin, scale, null); } + public static GaussDecayFunctionBuilder gaussDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale, Object offset) { return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset); } @@ -72,6 +112,26 @@ public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Obj return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static GaussDecayFunctionBuilder gaussDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object origin, + Object scale, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, null, functionName); + } + public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object origin, Object scale) { return new LinearDecayFunctionBuilder(fieldName, origin, scale, null); } @@ -80,6 +140,16 @@ public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, O return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset); } + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object 
origin, + Object scale, + Object offset, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, functionName); + } + public static LinearDecayFunctionBuilder linearDecayFunction( String fieldName, Object origin, @@ -90,23 +160,54 @@ public static LinearDecayFunctionBuilder linearDecayFunction( return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } + public static LinearDecayFunctionBuilder linearDecayFunction( + String fieldName, + Object origin, + Object scale, + Object offset, + double decay, + @Nullable String functionName + ) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, decay, functionName); + } + public static ScriptScoreFunctionBuilder scriptFunction(Script script) { - return (new ScriptScoreFunctionBuilder(script)); + return scriptFunction(script, null); } public static ScriptScoreFunctionBuilder scriptFunction(String script) { - return (new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, script, emptyMap()))); + return scriptFunction(script, null); } public static RandomScoreFunctionBuilder randomFunction() { - return new RandomScoreFunctionBuilder(); + return randomFunction(null); } public static WeightBuilder weightFactorFunction(float weight) { - return (WeightBuilder) (new WeightBuilder().setWeight(weight)); + return weightFactorFunction(weight, null); } public static FieldValueFactorFunctionBuilder fieldValueFactorFunction(String fieldName) { - return new FieldValueFactorFunctionBuilder(fieldName); + return fieldValueFactorFunction(fieldName, null); + } + + public static ScriptScoreFunctionBuilder scriptFunction(Script script, @Nullable String functionName) { + return new ScriptScoreFunctionBuilder(script, functionName); + } + + public static ScriptScoreFunctionBuilder scriptFunction(String script, @Nullable String functionName) { + return new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, 
Script.DEFAULT_SCRIPT_LANG, script, emptyMap()), functionName); + } + + public static RandomScoreFunctionBuilder randomFunction(@Nullable String functionName) { + return new RandomScoreFunctionBuilder(functionName); + } + + public static WeightBuilder weightFactorFunction(float weight, @Nullable String functionName) { + return (WeightBuilder) (new WeightBuilder(functionName).setWeight(weight)); + } + + public static FieldValueFactorFunctionBuilder fieldValueFactorFunction(String fieldName, @Nullable String functionName) { + return new FieldValueFactorFunctionBuilder(fieldName, functionName); } } diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java index 8b6cbe3a1bafd..2701e5867edde 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ScriptScoreFunctionBuilder.java @@ -32,6 +32,7 @@ package org.opensearch.index.query.functionscore; +import org.opensearch.common.Nullable; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -57,10 +58,15 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { */ public WeightBuilder() {} + /** + * Standard constructor. + */ + public WeightBuilder(@Nullable String functionName) { + setFunctionName(functionName); + } + /** * Read from a stream. 
*/ diff --git a/server/src/main/java/org/opensearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/opensearch/index/reindex/AbstractBulkByScrollRequest.java index b80dce43cbdcb..0de9a50dd1e8d 100644 --- a/server/src/main/java/org/opensearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/opensearch/index/reindex/AbstractBulkByScrollRequest.java @@ -503,9 +503,6 @@ protected void searchToString(StringBuilder b) { } else { b.append("[all indices]"); } - if (searchRequest.types() != null && searchRequest.types().length != 0) { - b.append(Arrays.toString(searchRequest.types())); - } } @Override diff --git a/server/src/main/java/org/opensearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/opensearch/index/reindex/BulkByScrollResponse.java index 7d5c0e151ddbc..6a4b55f5877e7 100644 --- a/server/src/main/java/org/opensearch/index/reindex/BulkByScrollResponse.java +++ b/server/src/main/java/org/opensearch/index/reindex/BulkByScrollResponse.java @@ -240,7 +240,6 @@ private static Object parseFailure(XContentParser parser) throws IOException { ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser); Token token; String index = null; - String type = null; String id = null; Integer status = null; Integer shardId = null; @@ -270,9 +269,6 @@ private static Object parseFailure(XContentParser parser) throws IOException { case Failure.INDEX_FIELD: index = parser.text(); break; - case Failure.TYPE_FIELD: - type = parser.text(); - break; case Failure.ID_FIELD: id = parser.text(); break; @@ -298,7 +294,7 @@ private static Object parseFailure(XContentParser parser) throws IOException { } } if (bulkExc != null) { - return new Failure(index, type, id, bulkExc, RestStatus.fromCode(status)); + return new Failure(index, id, bulkExc, RestStatus.fromCode(status)); } else if (searchExc != null) { if (status == null) { return new SearchFailure(searchExc, index, shardId, nodeId); diff --git 
a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java index 6ad793c2fb911..6a78d16d3b385 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java @@ -91,9 +91,8 @@ public ClientScrollableHitSource( public void doStart(RejectAwareActionListener searchListener) { if (logger.isDebugEnabled()) { logger.debug( - "executing initial scroll against {}{}", - isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(), - isEmpty(firstSearchRequest.types()) ? "" : firstSearchRequest.types() + "executing initial scroll against {}", + isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices() ); } client.search(firstSearchRequest, wrapListener(searchListener)); @@ -192,11 +191,6 @@ public String getIndex() { return delegate.getIndex(); } - @Override - public String getType() { - return delegate.getType(); - } - @Override public String getId() { return delegate.getId(); diff --git a/server/src/main/java/org/opensearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/opensearch/index/reindex/DeleteByQueryRequest.java index 74a11e62ed851..4f297b89c0651 100644 --- a/server/src/main/java/org/opensearch/index/reindex/DeleteByQueryRequest.java +++ b/server/src/main/java/org/opensearch/index/reindex/DeleteByQueryRequest.java @@ -102,19 +102,6 @@ public DeleteByQueryRequest setQuery(QueryBuilder query) { return this; } - /** - * Set the document types for the delete - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public DeleteByQueryRequest setDocTypes(String... 
types) { - if (types != null) { - getSearchRequest().types(types); - } - return this; - } - /** * Set routing limiting the process to the shards that match that routing value */ @@ -155,21 +142,6 @@ public String getRouting() { return getSearchRequest().routing(); } - /** - * Gets the document types on which this request would be executed. Returns an empty array if all - * types are to be processed. - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public String[] getDocTypes() { - if (getSearchRequest().types() != null) { - return getSearchRequest().types(); - } else { - return new String[0]; - } - } - @Override protected DeleteByQueryRequest self() { return this; @@ -223,29 +195,6 @@ public IndicesOptions indicesOptions() { return getSearchRequest().indicesOptions(); } - /** - * Gets the document types on which this request would be executed. - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public String[] types() { - assert getSearchRequest() != null; - return getSearchRequest().types(); - } - - /** - * Set the document types for the delete - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public DeleteByQueryRequest types(String... 
types) { - assert getSearchRequest() != null; - getSearchRequest().types(types); - return this; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java index 6631b721d1b88..5858b4b8108d2 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java @@ -50,7 +50,6 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.VersionType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.script.Script; import org.opensearch.search.sort.SortOrder; @@ -167,16 +166,6 @@ public ReindexRequest setSourceIndices(String... sourceIndices) { return this; } - /** - * Set the document types which need to be copied from the source indices - */ - public ReindexRequest setSourceDocTypes(String... 
docTypes) { - if (docTypes != null) { - this.getSearchRequest().types(docTypes); - } - return this; - } - /** * Sets the scroll size for setting how many documents are to be processed in one batch during reindex */ @@ -219,14 +208,6 @@ public ReindexRequest setDestIndex(String destIndex) { return this; } - /** - * Set the document type for the destination index - */ - public ReindexRequest setDestDocType(String docType) { - this.getDestination().type(docType); - return this; - } - /** * Set the routing to decide which shard the documents need to be routed to */ @@ -313,9 +294,6 @@ public String toString() { } searchToString(b); b.append(" to [").append(destination.index()).append(']'); - if (destination.type() != null) { - b.append('[').append(destination.type()).append(']'); - } return b.toString(); } @@ -330,10 +308,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.rawField("query", remoteInfo.getQuery().streamInput(), RemoteInfo.QUERY_CONTENT_TYPE.type()); } builder.array("index", getSearchRequest().indices()); - String[] types = getSearchRequest().types(); - if (types.length > 0) { - builder.array("type", types); - } getSearchRequest().source().innerToXContent(builder, params); builder.endObject(); } @@ -341,10 +315,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws // build destination builder.startObject("dest"); builder.field("index", getDestination().index()); - String type = getDestination().type(); - if (type != null && type.equals(MapperService.SINGLE_MAPPING_NAME) == false) { - builder.field("type", getDestination().type()); - } if (getDestination().routing() != null) { builder.field("routing", getDestination().routing()); } @@ -383,11 +353,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (indices != null) { request.getSearchRequest().indices(indices); } - String[] types = extractStringArray(source, "type"); - if (types != null) { - 
deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE); - request.getSearchRequest().types(types); - } request.setRemoteInfo(buildRemoteInfo(source)); XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); builder.map(source); @@ -403,10 +368,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ObjectParser destParser = new ObjectParser<>("dest"); destParser.declareString(IndexRequest::index, new ParseField("index")); - destParser.declareString((request, type) -> { - deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE); - request.type(type); - }, new ParseField("type")); destParser.declareString(IndexRequest::routing, new ParseField("routing")); destParser.declareString(IndexRequest::opType, new ParseField("op_type")); destParser.declareString(IndexRequest::setPipeline, new ParseField("pipeline")); diff --git a/server/src/main/java/org/opensearch/index/reindex/ReindexRequestBuilder.java b/server/src/main/java/org/opensearch/index/reindex/ReindexRequestBuilder.java index a8d518414a53d..291acd1e8ad8d 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ReindexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/index/reindex/ReindexRequestBuilder.java @@ -78,14 +78,6 @@ public ReindexRequestBuilder destination(String index) { return this; } - /** - * Set the destination index and type. - */ - public ReindexRequestBuilder destination(String index, String type) { - destination.setIndex(index).setType(type); - return this; - } - /** * Setup reindexing from a remote cluster. 
*/ diff --git a/server/src/main/java/org/opensearch/index/reindex/ScrollableHitSource.java b/server/src/main/java/org/opensearch/index/reindex/ScrollableHitSource.java index c1a8a913b0cf5..f0c720e21a73e 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ScrollableHitSource.java +++ b/server/src/main/java/org/opensearch/index/reindex/ScrollableHitSource.java @@ -253,11 +253,6 @@ public interface Hit { */ String getIndex(); - /** - * The type that the hit has. - */ - String getType(); - /** * The document id of the hit. */ @@ -304,7 +299,6 @@ public interface Hit { */ public static class BasicHit implements Hit { private final String index; - private final String type; private final String id; private final long version; @@ -314,9 +308,8 @@ public static class BasicHit implements Hit { private long seqNo; private long primaryTerm; - public BasicHit(String index, String type, String id, long version) { + public BasicHit(String index, String id, long version) { this.index = index; - this.type = type; this.id = id; this.version = version; } @@ -326,11 +319,6 @@ public String getIndex() { return index; } - @Override - public String getType() { - return type; - } - @Override public String getId() { return id; diff --git a/server/src/main/java/org/opensearch/index/reindex/UpdateByQueryRequest.java b/server/src/main/java/org/opensearch/index/reindex/UpdateByQueryRequest.java index 67675f1d998bd..92bcef8455e63 100644 --- a/server/src/main/java/org/opensearch/index/reindex/UpdateByQueryRequest.java +++ b/server/src/main/java/org/opensearch/index/reindex/UpdateByQueryRequest.java @@ -98,19 +98,6 @@ public UpdateByQueryRequest setQuery(QueryBuilder query) { return this; } - /** - * Set the document types for the update - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public UpdateByQueryRequest setDocTypes(String... 
types) { - if (types != null) { - getSearchRequest().types(types); - } - return this; - } - /** * Set routing limiting the process to the shards that match that routing value */ @@ -151,21 +138,6 @@ public String getRouting() { return getSearchRequest().routing(); } - /** - * Gets the document types on which this request would be executed. Returns an empty array if all - * types are to be processed. - * @deprecated Types are in the process of being removed. Instead of - * using a type, prefer to filter on a field of the document. - */ - @Deprecated - public String[] getDocTypes() { - if (getSearchRequest().types() != null) { - return getSearchRequest().types(); - } else { - return new String[0]; - } - } - /** * Ingest pipeline to set on index requests made by this action. */ diff --git a/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java b/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java index fa73498300c33..7aab597f8816c 100644 --- a/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java +++ b/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java @@ -230,9 +230,8 @@ public boolean hasProcessed(final long seqNo) { @SuppressForbidden(reason = "Object#notifyAll") private void updateCheckpoint(AtomicLong checkPoint, LongObjectHashMap bitSetMap) { assert Thread.holdsLock(this); - assert getBitSetForSeqNo(bitSetMap, checkPoint.get() + 1).get( - seqNoToBitSetOffset(checkPoint.get() + 1) - ) : "updateCheckpoint is called but the bit following the checkpoint is not set"; + assert getBitSetForSeqNo(bitSetMap, checkPoint.get() + 1).get(seqNoToBitSetOffset(checkPoint.get() + 1)) + : "updateCheckpoint is called but the bit following the checkpoint is not set"; try { // keep it simple for now, get the checkpoint one by one; in the future we can optimize and read words long bitSetKey = getBitSetKey(checkPoint.get()); diff --git 
a/server/src/main/java/org/opensearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/opensearch/index/seqno/ReplicationTracker.java index b2986ee3fccdd..52e858bfdaa92 100644 --- a/server/src/main/java/org/opensearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/opensearch/index/seqno/ReplicationTracker.java @@ -845,23 +845,15 @@ private boolean invariant() { assert pendingInSync.isEmpty() || (primaryMode && !handoffInProgress); // the computed global checkpoint is always up-to-date - assert !primaryMode - || globalCheckpoint == computeGlobalCheckpoint( - pendingInSync, - checkpoints.values(), - globalCheckpoint - ) : "global checkpoint is not up-to-date, expected: " + assert !primaryMode || globalCheckpoint == computeGlobalCheckpoint(pendingInSync, checkpoints.values(), globalCheckpoint) + : "global checkpoint is not up-to-date, expected: " + computeGlobalCheckpoint(pendingInSync, checkpoints.values(), globalCheckpoint) + " but was: " + globalCheckpoint; // when in primary mode, the global checkpoint is at most the minimum local checkpoint on all in-sync shard copies - assert !primaryMode - || globalCheckpoint <= inSyncCheckpointStates( - checkpoints, - CheckpointState::getLocalCheckpoint, - LongStream::min - ) : "global checkpoint [" + assert !primaryMode || globalCheckpoint <= inSyncCheckpointStates(checkpoints, CheckpointState::getLocalCheckpoint, LongStream::min) + : "global checkpoint [" + globalCheckpoint + "] " + "for primary mode allocation ID [" @@ -877,11 +869,8 @@ private boolean invariant() { + " but replication group is " + replicationGroup; - assert replicationGroup == null - || replicationGroup.equals(calculateReplicationGroup()) : "cached replication group out of sync: expected: " - + calculateReplicationGroup() - + " but was: " - + replicationGroup; + assert replicationGroup == null || replicationGroup.equals(calculateReplicationGroup()) + : "cached replication group out of sync: expected: " + 
calculateReplicationGroup() + " but was: " + replicationGroup; // all assigned shards from the routing table are tracked assert routingTable == null || checkpoints.keySet().containsAll(routingTable.getAllAllocationIds()) : "local checkpoints " @@ -907,9 +896,8 @@ private boolean invariant() { // all tracked shard copies have a corresponding peer-recovery retention lease for (final ShardRouting shardRouting : routingTable.assignedShards()) { if (checkpoints.get(shardRouting.allocationId().getId()).tracked) { - assert retentionLeases.contains( - getPeerRecoveryRetentionLeaseId(shardRouting) - ) : "no retention lease for tracked shard [" + shardRouting + "] in " + retentionLeases; + assert retentionLeases.contains(getPeerRecoveryRetentionLeaseId(shardRouting)) + : "no retention lease for tracked shard [" + shardRouting + "] in " + retentionLeases; assert PEER_RECOVERY_RETENTION_LEASE_SOURCE.equals( retentionLeases.get(getPeerRecoveryRetentionLeaseId(shardRouting)).source() ) : "incorrect source [" @@ -1190,13 +1178,11 @@ public synchronized void updateFromMaster( if (applyingClusterStateVersion > appliedClusterStateVersion) { // check that the master does not fabricate new in-sync entries out of thin air once we are in primary mode assert !primaryMode - || inSyncAllocationIds.stream() - .allMatch( - inSyncId -> checkpoints.containsKey(inSyncId) && checkpoints.get(inSyncId).inSync - ) : "update from master in primary mode contains in-sync ids " - + inSyncAllocationIds - + " that have no matching entries in " - + checkpoints; + || inSyncAllocationIds.stream().allMatch(inSyncId -> checkpoints.containsKey(inSyncId) && checkpoints.get(inSyncId).inSync) + : "update from master in primary mode contains in-sync ids " + + inSyncAllocationIds + + " that have no matching entries in " + + checkpoints; // remove entries which don't exist on master Set initializingAllocationIds = routingTable.getAllInitializingShards() .stream() diff --git 
a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index b2f94f3d398ef..f2630ad05b488 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -51,9 +51,9 @@ import org.apache.lucene.util.SetOnce; import org.apache.lucene.util.ThreadInterruptedException; import org.opensearch.Assertions; +import org.opensearch.ExceptionsHelper; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; -import org.opensearch.ExceptionsHelper; import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -73,6 +73,7 @@ import org.opensearch.common.CheckedRunnable; import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; @@ -121,7 +122,6 @@ import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.DocumentMapperForType; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.ParsedDocument; @@ -153,7 +153,6 @@ import org.opensearch.index.warmer.WarmerStats; import org.opensearch.indices.IndexingMemoryController; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.TypeMissingException; import org.opensearch.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.recovery.PeerRecoveryTargetService; @@ -516,9 +515,8 @@ public void updateShardState( assert 
currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; assert currentRouting.isRelocationTarget() == false || currentRouting.primary() == false - || replicationTracker - .isPrimaryMode() : "a primary relocation is completed by the master, but primary mode is not active " - + currentRouting; + || replicationTracker.isPrimaryMode() + : "a primary relocation is completed by the master, but primary mode is not active " + currentRouting; changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); } else if (currentRouting.primary() @@ -533,12 +531,8 @@ public void updateShardState( "Shard is marked as relocated, cannot safely move to state " + newRouting.state() ); } - assert newRouting.active() == false - || state == IndexShardState.STARTED - || state == IndexShardState.CLOSED : "routing is active, but local shard state isn't. routing: " - + newRouting - + ", local state: " - + state; + assert newRouting.active() == false || state == IndexShardState.STARTED || state == IndexShardState.CLOSED + : "routing is active, but local shard state isn't. 
routing: " + newRouting + ", local state: " + state; persistMetadata(path, indexSettings, newRouting, currentRouting, logger); final CountDownLatch shardStateUpdated = new CountDownLatch(1); @@ -726,8 +720,8 @@ public void relocated(final String targetAllocationId, final Consumer { forceRefreshes.close(); // no shard operation permits are being held here, move state from started to relocated - assert indexShardOperationPermits - .getActiveOperationsCount() == OPERATIONS_BLOCKED : "in-flight operations in progress while moving shard state to relocated"; + assert indexShardOperationPermits.getActiveOperationsCount() == OPERATIONS_BLOCKED + : "in-flight operations in progress while moving shard state to relocated"; /* * We should not invoke the runnable under the mutex as the expected implementation is to handoff the primary context via a * network operation. Doing this under the mutex can implicitly block the cluster state update thread on network operations. @@ -871,23 +865,9 @@ private Engine.IndexResult applyIndexOperation( ensureWriteAllowed(origin); Engine.Index operation; try { - final String resolvedType = mapperService.resolveDocumentType(sourceToParse.type()); - final SourceToParse sourceWithResolvedType; - if (resolvedType.equals(sourceToParse.type())) { - sourceWithResolvedType = sourceToParse; - } else { - sourceWithResolvedType = new SourceToParse( - sourceToParse.index(), - resolvedType, - sourceToParse.id(), - sourceToParse.source(), - sourceToParse.getXContentType(), - sourceToParse.routing() - ); - } operation = prepareIndex( - docMapper(resolvedType), - sourceWithResolvedType, + docMapper(), + sourceToParse, seqNo, opPrimaryTerm, version, @@ -957,8 +937,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc if (logger.isTraceEnabled()) { // don't use index.source().utf8ToString() here source might not be valid UTF-8 logger.trace( - "index [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] 
origin [{}]", - index.type(), + "index [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", index.id(), index.seqNo(), routingEntry().allocationId(), @@ -970,9 +949,8 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc result = engine.index(index); if (logger.isTraceEnabled()) { logger.trace( - "index-done [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] " + "index-done [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] " + "result-seq# [{}] result-term [{}] failure [{}]", - index.type(), index.id(), index.seqNo(), routingEntry().allocationId(), @@ -988,8 +966,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc if (logger.isTraceEnabled()) { logger.trace( new ParameterizedMessage( - "index-fail [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", - index.type(), + "index-fail [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", index.id(), index.seqNo(), routingEntry().allocationId(), @@ -1042,7 +1019,6 @@ public Engine.DeleteResult getFailedDeleteResult(Exception e, long version) { public Engine.DeleteResult applyDeleteOperationOnPrimary( long version, - String type, String id, VersionType versionType, long ifSeqNo, @@ -1054,7 +1030,6 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary( UNASSIGNED_SEQ_NO, getOperationPrimaryTerm(), version, - type, id, versionType, ifSeqNo, @@ -1063,14 +1038,12 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary( ); } - public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String type, String id) - throws IOException { + public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String id) throws IOException { return applyDeleteOperation( getEngine(), seqNo, 
opPrimaryTerm, version, - type, id, null, UNASSIGNED_SEQ_NO, @@ -1084,7 +1057,6 @@ private Engine.DeleteResult applyDeleteOperation( long seqNo, long opPrimaryTerm, long version, - String type, String id, @Nullable VersionType versionType, long ifSeqNo, @@ -1097,54 +1069,12 @@ private Engine.DeleteResult applyDeleteOperation( + getOperationPrimaryTerm() + "]"; ensureWriteAllowed(origin); - // When there is a single type, the unique identifier is only composed of the _id, - // so there is no way to differentiate foo#1 from bar#1. This is especially an issue - // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the - // _type in the uid it might look like we are reindexing the same document, which - // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with. - // In order to work around this issue, we make deletions create types. This way, we - // fail if index and delete operations do not use the same type. - // TODO: clean this up when types are gone - try { - Mapping update = docMapper(type).getMapping(); - if (update != null) { - return new Engine.DeleteResult(update); - } - } catch (MapperParsingException | IllegalArgumentException | TypeMissingException e) { - return new Engine.DeleteResult(e, version, getOperationPrimaryTerm(), seqNo, false); - } - if (mapperService.resolveDocumentType(type).equals(mapperService.documentMapper().type()) == false) { - // We should never get there due to the fact that we generate mapping updates on deletes, - // but we still prefer to have a hard exception here as we would otherwise delete a - // document in the wrong type. 
- throw new IllegalStateException( - "Deleting document from type [" - + mapperService.resolveDocumentType(type) - + "] while current type is [" - + mapperService.documentMapper().type() - + "]" - ); - } - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); - final Engine.Delete delete = prepareDelete( - type, - id, - uid, - seqNo, - opPrimaryTerm, - version, - versionType, - origin, - ifSeqNo, - ifPrimaryTerm - ); + final Engine.Delete delete = prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm); return delete(engine, delete); } - private Engine.Delete prepareDelete( - String type, + public static Engine.Delete prepareDelete( String id, - Term uid, long seqNo, long primaryTerm, long version, @@ -1154,19 +1084,8 @@ private Engine.Delete prepareDelete( long ifPrimaryTerm ) { long startTime = System.nanoTime(); - return new Engine.Delete( - mapperService.resolveDocumentType(type), - id, - uid, - seqNo, - primaryTerm, - version, - versionType, - origin, - startTime, - ifSeqNo, - ifPrimaryTerm - ); + final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm); } private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException { @@ -1189,7 +1108,7 @@ private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws I public Engine.GetResult get(Engine.Get get) { readAllowed(); DocumentMapper mapper = mapperService.documentMapper(); - if (mapper == null || mapper.type().equals(mapperService.resolveDocumentType(get.type())) == false) { + if (mapper == null) { return GetResult.NOT_EXISTS; } return getEngine().get(get, this::acquireSearcher); @@ -1253,7 +1172,7 @@ public SeqNoStats seqNoStats() { return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } - public IndexingStats indexingStats(String... 
types) { + public IndexingStats indexingStats() { Engine engine = getEngineOrNull(); final boolean throttled; final long throttleTimeInMillis; @@ -1264,7 +1183,7 @@ public IndexingStats indexingStats(String... types) { throttled = engine.isThrottled(); throttleTimeInMillis = engine.getIndexThrottleTimeInMillis(); } - return internalIndexingStats.stats(throttled, throttleTimeInMillis, types); + return internalIndexingStats.stats(throttled, throttleTimeInMillis); } public SearchStats searchStats(String... groups) { @@ -1414,7 +1333,7 @@ public org.apache.lucene.util.Version minimumCompatibleVersion() { * * @param flushFirst true if the index should first be flushed to disk / a low level lucene commit should be executed */ - public Engine.IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1428,7 +1347,7 @@ public Engine.IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws E * Snapshots the most recent safe index commit from the currently running engine. * All index files referenced by this index commit won't be freed until the commit/snapshot is closed. 
*/ - public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1453,7 +1372,7 @@ public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { */ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { assert Thread.holdsLock(mutex) == false : "snapshotting store metadata under mutex"; - Engine.IndexCommitRef indexCommit = null; + GatedCloseable wrappedIndexCommit = null; store.incRef(); try { synchronized (engineMutex) { @@ -1461,16 +1380,16 @@ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { // the engine on us. If the engine is running, we can get a snapshot via the deletion policy of the engine. 
final Engine engine = getEngineOrNull(); if (engine != null) { - indexCommit = engine.acquireLastIndexCommit(false); + wrappedIndexCommit = engine.acquireLastIndexCommit(false); } - if (indexCommit == null) { + if (wrappedIndexCommit == null) { return store.getMetadata(null, true); } } - return store.getMetadata(indexCommit.getIndexCommit()); + return store.getMetadata(wrappedIndexCommit.get()); } finally { store.decRef(); - IOUtils.close(indexCommit); + IOUtils.close(wrappedIndexCommit); } } @@ -1516,9 +1435,8 @@ private Engine.Searcher acquireSearcher(String source, Engine.SearcherScope scop } private Engine.Searcher wrapSearcher(Engine.Searcher searcher) { - assert OpenSearchDirectoryReader.unwrap( - searcher.getDirectoryReader() - ) != null : "DirectoryReader must be an instance or OpenSearchDirectoryReader"; + assert OpenSearchDirectoryReader.unwrap(searcher.getDirectoryReader()) != null + : "DirectoryReader must be an instance or OpenSearchDirectoryReader"; boolean success = false; try { final Engine.Searcher newSearcher = readerWrapper == null ? 
searcher : wrapSearcher(searcher, readerWrapper); @@ -1818,7 +1736,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o origin, new SourceToParse( shardId.getIndexName(), - index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source()), @@ -1833,7 +1750,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o delete.seqNo(), delete.primaryTerm(), delete.version(), - delete.type(), delete.id(), versionType, UNASSIGNED_SEQ_NO, @@ -1945,8 +1861,8 @@ private void innerOpenEngineAndTranslog(LongSupplier globalCheckpointSupplier) t // but we need to make sure we don't loose deletes until we are done recovering config.setEnableGcDeletes(false); updateRetentionLeasesOnReplica(loadRetentionLeases()); - assert recoveryState.getRecoverySource().expectEmptyRetentionLeases() == false - || getRetentionLeases().leases().isEmpty() : "expected empty set of retention leases with recovery source [" + assert recoveryState.getRecoverySource().expectEmptyRetentionLeases() == false || getRetentionLeases().leases().isEmpty() + : "expected empty set of retention leases with recovery source [" + recoveryState.getRecoverySource() + "] but got " + getRetentionLeases(); @@ -2085,9 +2001,8 @@ private void ensureWriteAllowed(Engine.Operation.Origin origin) throws IllegalIn assert assertReplicationTarget(); } else { assert origin == Engine.Operation.Origin.LOCAL_RESET; - assert getActiveOperationsCount() == OPERATIONS_BLOCKED : "locally resetting without blocking operations, active operations are [" - + getActiveOperations() - + "]"; + assert getActiveOperationsCount() == OPERATIONS_BLOCKED + : "locally resetting without blocking operations, active operations are [" + getActiveOperations() + "]"; } if (writeAllowedStates.contains(state) == false) { throw new IllegalIndexShardStateException( @@ -2189,7 +2104,7 @@ public ShardPath shardPath() { } public void recoverFromLocalShards( - BiConsumer 
mappingUpdateConsumer, + Consumer mappingUpdateConsumer, List localShards, ActionListener listener ) throws IOException { @@ -2311,42 +2226,26 @@ protected void doRun() { /** * Acquires a lock on the translog files and Lucene soft-deleted documents to prevent them from being trimmed */ - public Closeable acquireHistoryRetentionLock(Engine.HistorySource source) { - return getEngine().acquireHistoryRetentionLock(source); - } - - /** - * Returns the estimated number of history operations whose seq# at least the provided seq# in this shard. - */ - public int estimateNumberOfHistoryOperations(String reason, Engine.HistorySource source, long startingSeqNo) throws IOException { - return getEngine().estimateNumberOfHistoryOperations(reason, source, mapperService, startingSeqNo); + public Closeable acquireHistoryRetentionLock() { + return getEngine().acquireHistoryRetentionLock(); } /** - * Creates a new history snapshot for reading operations since the provided starting seqno (inclusive). - * The returned snapshot can be retrieved from either Lucene index or translog files. - */ - public Translog.Snapshot getHistoryOperations(String reason, Engine.HistorySource source, long startingSeqNo) throws IOException { - return getEngine().readHistoryOperations(reason, source, mapperService, startingSeqNo); - } - - /** - * * Creates a new history snapshot for reading operations since * the provided starting seqno (inclusive) and ending seqno (inclusive) * The returned snapshot can be retrieved from either Lucene index or translog files. 
*/ - public Translog.Snapshot getHistoryOperations(String reason, Engine.HistorySource source, long startingSeqNo, long endSeqNo) + public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo, boolean accurateCount) throws IOException { - return getEngine().newChangesSnapshot(reason, source, mapperService, startingSeqNo, endSeqNo, true); + return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true, accurateCount); } /** * Checks if we have a completed history of operations since the given starting seqno (inclusive). - * This method should be called after acquiring the retention lock; See {@link #acquireHistoryRetentionLock(Engine.HistorySource)} + * This method should be called after acquiring the retention lock; See {@link #acquireHistoryRetentionLock()} */ - public boolean hasCompleteHistoryOperations(String reason, Engine.HistorySource source, long startingSeqNo) throws IOException { - return getEngine().hasCompleteOperationHistory(reason, source, mapperService, startingSeqNo); + public boolean hasCompleteHistoryOperations(String reason, long startingSeqNo) { + return getEngine().hasCompleteOperationHistory(reason, startingSeqNo); } /** @@ -2358,6 +2257,17 @@ public long getMinRetainedSeqNo() { return getEngine().getMinRetainedSeqNo(); } + /** + * Counts the number of history operations within the provided sequence numbers + * @param source source of the requester (e.g., peer-recovery) + * @param fromSeqNo from sequence number, included + * @param toSeqNo to sequence number, included + * @return number of history operations in the sequence number range + */ + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + return getEngine().countNumberOfHistoryOperations(source, fromSeqNo, toSeqNo); + } + /** * Creates a new changes snapshot for reading operations whose seq_no are between {@code fromSeqNo}(inclusive) * and {@code toSeqNo}(inclusive). 
The caller has to close the returned snapshot after finishing the reading. @@ -2369,8 +2279,14 @@ public long getMinRetainedSeqNo() { * if any operation between {@code fromSeqNo} and {@code toSeqNo} is missing. * This parameter should be only enabled when the entire requesting range is below the global checkpoint. */ - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { - return getEngine().newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { + return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange, accurateCount); } public List segments(boolean verbose) { @@ -2536,7 +2452,7 @@ public RetentionLease addRetentionLease( assert assertPrimaryMode(); verifyNotClosed(); ensureSoftDeletesEnabled("retention leases"); - try (Closeable ignore = acquireHistoryRetentionLock(Engine.HistorySource.INDEX)) { + try (Closeable ignore = acquireHistoryRetentionLock()) { final long actualRetainingSequenceNumber = retainingSequenceNumber == RETAIN_ALL ? getMinRetainedSeqNo() : retainingSequenceNumber; @@ -2559,7 +2475,7 @@ public RetentionLease renewRetentionLease(final String id, final long retainingS assert assertPrimaryMode(); verifyNotClosed(); ensureSoftDeletesEnabled("retention leases"); - try (Closeable ignore = acquireHistoryRetentionLock(Engine.HistorySource.INDEX)) { + try (Closeable ignore = acquireHistoryRetentionLock()) { final long actualRetainingSequenceNumber = retainingSequenceNumber == RETAIN_ALL ? 
getMinRetainedSeqNo() : retainingSequenceNumber; @@ -2793,8 +2709,8 @@ public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final S * while the global checkpoint update may have emanated from the primary when we were in that state, we could subsequently move * to recovery finalization, or even finished recovery before the update arrives here. */ - assert state() != IndexShardState.POST_RECOVERY - && state() != IndexShardState.STARTED : "supposedly in-sync shard copy received a global checkpoint [" + assert state() != IndexShardState.POST_RECOVERY && state() != IndexShardState.STARTED + : "supposedly in-sync shard copy received a global checkpoint [" + globalCheckpoint + "] " + "that is higher than its local checkpoint [" @@ -2811,9 +2727,8 @@ && state() != IndexShardState.STARTED : "supposedly in-sync shard copy received * @param primaryContext the sequence number context */ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext primaryContext) { - assert shardRouting.primary() - && shardRouting.isRelocationTarget() : "only primary relocation target can update allocation IDs from primary context: " - + shardRouting; + assert shardRouting.primary() && shardRouting.isRelocationTarget() + : "only primary relocation target can update allocation IDs from primary context: " + shardRouting; assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) : "primary context [" + primaryContext + "] does not contain relocation target [" @@ -2855,11 +2770,9 @@ public boolean pendingInSync() { /** * Should be called for each no-op update operation to increment relevant statistics. 
- * - * @param type the doc type of the update */ - public void noopUpdate(String type) { - internalIndexingStats.noopUpdate(type); + public void noopUpdate() { + internalIndexingStats.noopUpdate(); } public void maybeCheckIndex() { @@ -2955,7 +2868,7 @@ public void startRecovery( PeerRecoveryTargetService recoveryTargetService, PeerRecoveryTargetService.RecoveryListener recoveryListener, RepositoriesService repositoriesService, - BiConsumer mappingUpdateConsumer, + Consumer mappingUpdateConsumer, IndicesService indicesService ) { // TODO: Create a proper object to encapsulate the recovery context @@ -3168,8 +3081,8 @@ private static void persistMetadata( } } - private DocumentMapperForType docMapper(String type) { - return mapperService.documentMapperWithAutoCreate(mapperService.resolveDocumentType(type)); + private DocumentMapperForType docMapper() { + return mapperService.documentMapperWithAutoCreate(); } private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { @@ -3181,6 +3094,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { this.warmer.warm(reader); } }; + return this.engineConfigFactory.newEngineConfig( shardId, threadPool, @@ -3190,7 +3104,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { indexSettings.getMergePolicy(), mapperService != null ? mapperService.indexAnalyzer() : null, similarityService.similarity(mapperService), - codecService, + engineConfigFactory.newCodecServiceOrDefault(indexSettings, mapperService, logger, codecService), shardEventListener, indexCache != null ? 
indexCache.query() : null, cachingPolicy, @@ -3897,8 +3811,8 @@ private EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { : null; return new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { - return docMapper(type).getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id); + public ParsedDocument newDeleteTombstoneDoc(String id) { + return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), id); } @Override @@ -3938,7 +3852,7 @@ void resetEngineToGlobalCheckpoint() throws IOException { true ) { @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); @@ -3949,7 +3863,7 @@ public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexingStats.java b/server/src/main/java/org/opensearch/index/shard/IndexingStats.java index 0f64f97a256ee..bdc5373e0b9b3 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexingStats.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexingStats.java @@ -32,7 +32,7 @@ package org.opensearch.index.shard; -import org.opensearch.common.Nullable; +import org.opensearch.Version; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -40,9 +40,9 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.ToXContentFragment; import 
org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; -import java.util.HashMap; import java.util.Map; public class IndexingStats implements Writeable, ToXContentFragment { @@ -219,47 +219,30 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private final Stats totalStats; - @Nullable - private Map typeStats; - public IndexingStats() { totalStats = new Stats(); } public IndexingStats(StreamInput in) throws IOException { totalStats = new Stats(in); - if (in.readBoolean()) { - typeStats = in.readMap(StreamInput::readString, Stats::new); + if (in.getVersion().before(Version.V_2_0_0)) { + if (in.readBoolean()) { + Map typeStats = in.readMap(StreamInput::readString, Stats::new); + assert typeStats.size() == 1; + assert typeStats.containsKey(MapperService.SINGLE_MAPPING_NAME); + } } } - public IndexingStats(Stats totalStats, @Nullable Map typeStats) { + public IndexingStats(Stats totalStats) { this.totalStats = totalStats; - this.typeStats = typeStats; } public void add(IndexingStats indexingStats) { - add(indexingStats, true); - } - - public void add(IndexingStats indexingStats, boolean includeTypes) { if (indexingStats == null) { return; } addTotals(indexingStats); - if (includeTypes && indexingStats.typeStats != null && !indexingStats.typeStats.isEmpty()) { - if (typeStats == null) { - typeStats = new HashMap<>(indexingStats.typeStats.size()); - } - for (Map.Entry entry : indexingStats.typeStats.entrySet()) { - Stats stats = typeStats.get(entry.getKey()); - if (stats == null) { - typeStats.put(entry.getKey(), entry.getValue()); - } else { - stats.add(entry.getValue()); - } - } - } } public void addTotals(IndexingStats indexingStats) { @@ -273,31 +256,16 @@ public Stats getTotal() { return this.totalStats; } - @Nullable - public Map getTypeStats() { - return this.typeStats; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, 
ToXContent.Params params) throws IOException { builder.startObject(Fields.INDEXING); totalStats.toXContent(builder, params); - if (typeStats != null && !typeStats.isEmpty()) { - builder.startObject(Fields.TYPES); - for (Map.Entry entry : typeStats.entrySet()) { - builder.startObject(entry.getKey()); - entry.getValue().toXContent(builder, params); - builder.endObject(); - } - builder.endObject(); - } builder.endObject(); return builder; } static final class Fields { static final String INDEXING = "indexing"; - static final String TYPES = "types"; static final String INDEX_TOTAL = "index_total"; static final String INDEX_TIME = "index_time"; static final String INDEX_TIME_IN_MILLIS = "index_time_in_millis"; @@ -316,11 +284,8 @@ static final class Fields { @Override public void writeTo(StreamOutput out) throws IOException { totalStats.writeTo(out); - if (typeStats == null || typeStats.isEmpty()) { + if (out.getVersion().before(Version.V_2_0_0)) { out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeMap(typeStats, StreamOutput::writeString, (stream, stats) -> stats.writeTo(stream)); } } } diff --git a/server/src/main/java/org/opensearch/index/shard/InternalIndexingStats.java b/server/src/main/java/org/opensearch/index/shard/InternalIndexingStats.java index ac5ae9a59fc66..76d64ab918163 100644 --- a/server/src/main/java/org/opensearch/index/shard/InternalIndexingStats.java +++ b/server/src/main/java/org/opensearch/index/shard/InternalIndexingStats.java @@ -32,56 +32,33 @@ package org.opensearch.index.shard; -import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; -import org.opensearch.common.regex.Regex; import org.opensearch.index.engine.Engine; -import java.util.HashMap; -import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyMap; - /** * Internal class that maintains relevant indexing statistics / metrics. 
* @see IndexShard */ final class InternalIndexingStats implements IndexingOperationListener { private final StatsHolder totalStats = new StatsHolder(); - private volatile Map typesStats = emptyMap(); /** * Returns the stats, including type specific stats. If the types are null/0 length, then nothing * is returned for them. If they are set, then only types provided will be returned, or * {@code _all} for all types. */ - IndexingStats stats(boolean isThrottled, long currentThrottleInMillis, String... types) { + IndexingStats stats(boolean isThrottled, long currentThrottleInMillis) { IndexingStats.Stats total = totalStats.stats(isThrottled, currentThrottleInMillis); - Map typesSt = null; - if (types != null && types.length > 0) { - typesSt = new HashMap<>(typesStats.size()); - if (types.length == 1 && types[0].equals("_all")) { - for (Map.Entry entry : typesStats.entrySet()) { - typesSt.put(entry.getKey(), entry.getValue().stats(isThrottled, currentThrottleInMillis)); - } - } else { - for (Map.Entry entry : typesStats.entrySet()) { - if (Regex.simpleMatch(types, entry.getKey())) { - typesSt.put(entry.getKey(), entry.getValue().stats(isThrottled, currentThrottleInMillis)); - } - } - } - } - return new IndexingStats(total, typesSt); + return new IndexingStats(total); } @Override public Engine.Index preIndex(ShardId shardId, Engine.Index operation) { if (operation.origin().isRecovery() == false) { totalStats.indexCurrent.inc(); - typeStats(operation.type()).indexCurrent.inc(); } return operation; } @@ -94,9 +71,6 @@ public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult re long took = result.getTook(); totalStats.indexMetric.inc(took); totalStats.indexCurrent.dec(); - StatsHolder typeStats = typeStats(index.type()); - typeStats.indexMetric.inc(took); - typeStats.indexCurrent.dec(); } break; case FAILURE: @@ -111,9 +85,7 @@ public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult re public void postIndex(ShardId shardId, 
Engine.Index index, Exception ex) { if (!index.origin().isRecovery()) { totalStats.indexCurrent.dec(); - typeStats(index.type()).indexCurrent.dec(); totalStats.indexFailed.inc(); - typeStats(index.type()).indexFailed.inc(); } } @@ -121,7 +93,6 @@ public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { if (!delete.origin().isRecovery()) { totalStats.deleteCurrent.inc(); - typeStats(delete.type()).deleteCurrent.inc(); } return delete; @@ -135,9 +106,6 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResul long took = result.getTook(); totalStats.deleteMetric.inc(took); totalStats.deleteCurrent.dec(); - StatsHolder typeStats = typeStats(delete.type()); - typeStats.deleteMetric.inc(took); - typeStats.deleteCurrent.dec(); } break; case FAILURE: @@ -152,27 +120,11 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResul public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { if (!delete.origin().isRecovery()) { totalStats.deleteCurrent.dec(); - typeStats(delete.type()).deleteCurrent.dec(); } } - public void noopUpdate(String type) { + void noopUpdate() { totalStats.noopUpdates.inc(); - typeStats(type).noopUpdates.inc(); - } - - private StatsHolder typeStats(String type) { - StatsHolder stats = typesStats.get(type); - if (stats == null) { - synchronized (this) { - stats = typesStats.get(type); - if (stats == null) { - stats = new StatsHolder(); - typesStats = MapBuilder.newMapBuilder(typesStats).put(type, stats).immutableMap(); - } - } - } - return stats; } static class StatsHolder { diff --git a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java index 148c39df070e8..98556db3ae138 100644 --- a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java +++ 
b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; @@ -39,6 +40,7 @@ import org.apache.lucene.store.Lock; import org.apache.lucene.store.NoLockFactory; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.index.Index; import org.opensearch.index.engine.Engine; import org.opensearch.index.store.Store; @@ -52,7 +54,7 @@ final class LocalShardSnapshot implements Closeable { private final IndexShard shard; private final Store store; - private final Engine.IndexCommitRef indexCommit; + private final GatedCloseable wrappedIndexCommit; private final AtomicBoolean closed = new AtomicBoolean(false); LocalShardSnapshot(IndexShard shard) { @@ -61,7 +63,7 @@ final class LocalShardSnapshot implements Closeable { store.incRef(); boolean success = false; try { - indexCommit = shard.acquireLastIndexCommit(true); + wrappedIndexCommit = shard.acquireLastIndexCommit(true); success = true; } finally { if (success == false) { @@ -88,7 +90,7 @@ Directory getSnapshotDirectory() { return new FilterDirectory(store.directory()) { @Override public String[] listAll() throws IOException { - Collection fileNames = indexCommit.getIndexCommit().getFileNames(); + Collection fileNames = wrappedIndexCommit.get().getFileNames(); final String[] fileNameArray = fileNames.toArray(new String[fileNames.size()]); return fileNameArray; } @@ -143,7 +145,7 @@ public Set getPendingDeletions() throws IOException { public void close() throws IOException { if (closed.compareAndSet(false, true)) { try { - indexCommit.close(); + wrappedIndexCommit.close(); } finally { store.decRef(); } @@ -156,6 +158,6 @@ IndexMetadata getIndexMetadata() { @Override public String toString() { - return 
"local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + indexCommit + "]"; + return "local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + wrappedIndexCommit + "]"; } } diff --git a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java index b5e40881cfd43..726d2925177fa 100644 --- a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java @@ -49,7 +49,6 @@ import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.engine.Engine; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.translog.Translog; import org.opensearch.tasks.Task; @@ -99,16 +98,13 @@ public void resync(final IndexShard indexShard, final ActionListener Translog.Snapshot snapshot = null; try { final long startingSeqNo = indexShard.getLastKnownGlobalCheckpoint() + 1; + assert startingSeqNo >= 0 : "startingSeqNo must be non-negative; got [" + startingSeqNo + "]"; final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); final ShardId shardId = indexShard.shardId(); // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender. // Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - snapshot = indexShard.getHistoryOperations( - "resync", - indexShard.indexSettings.isSoftDeleteEnabled() ? 
Engine.HistorySource.INDEX : Engine.HistorySource.TRANSLOG, - startingSeqNo - ); + snapshot = indexShard.newChangesSnapshot("resync", startingSeqNo, Long.MAX_VALUE, false, true); final Translog.Snapshot originalSnapshot = snapshot; final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override diff --git a/server/src/main/java/org/opensearch/index/shard/ReplicationGroup.java b/server/src/main/java/org/opensearch/index/shard/ReplicationGroup.java index 13aef9587aea6..fc52d09361281 100644 --- a/server/src/main/java/org/opensearch/index/shard/ReplicationGroup.java +++ b/server/src/main/java/org/opensearch/index/shard/ReplicationGroup.java @@ -85,9 +85,8 @@ public ReplicationGroup( replicationTargets.add(relocationTarget); } else { skippedShards.add(relocationTarget); - assert inSyncAllocationIds.contains( - relocationTarget.allocationId().getId() - ) == false : "in-sync shard copy but not tracked: " + shard; + assert inSyncAllocationIds.contains(relocationTarget.allocationId().getId()) == false + : "in-sync shard copy but not tracked: " + shard; } } } diff --git a/server/src/main/java/org/opensearch/index/shard/ShardPath.java b/server/src/main/java/org/opensearch/index/shard/ShardPath.java index 3d49a3c730700..39f86ea362bb1 100644 --- a/server/src/main/java/org/opensearch/index/shard/ShardPath.java +++ b/server/src/main/java/org/opensearch/index/shard/ShardPath.java @@ -62,17 +62,12 @@ public final class ShardPath { public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, ShardId shardId) { assert dataPath.getFileName().toString().equals(Integer.toString(shardId.id())) : "dataPath must end with the shard ID but didn't: " + dataPath.toString(); - assert shardStatePath.getFileName() - .toString() - .equals(Integer.toString(shardId.id())) : "shardStatePath must end with the shard ID but didn't: " + dataPath.toString(); - assert dataPath.getParent() - .getFileName() - .toString() - .equals(shardId.getIndex().getUUID()) : "dataPath must 
end with index path id but didn't: " + dataPath.toString(); - assert shardStatePath.getParent() - .getFileName() - .toString() - .equals(shardId.getIndex().getUUID()) : "shardStatePath must end with index path id but didn't: " + dataPath.toString(); + assert shardStatePath.getFileName().toString().equals(Integer.toString(shardId.id())) + : "shardStatePath must end with the shard ID but didn't: " + dataPath.toString(); + assert dataPath.getParent().getFileName().toString().equals(shardId.getIndex().getUUID()) + : "dataPath must end with index path id but didn't: " + dataPath.toString(); + assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndex().getUUID()) + : "shardStatePath must end with index path id but didn't: " + dataPath.toString(); if (isCustomDataPath && dataPath.equals(shardStatePath)) { throw new IllegalArgumentException("shard state path must be different to the data path when using custom data paths"); } diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 2b0f58ffe6b35..20bb6e7060ca3 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -32,7 +32,6 @@ package org.opensearch.index.shard; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -72,7 +71,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.stream.Collectors; import static org.opensearch.common.unit.TimeValue.timeValueMillis; @@ -103,8 +102,8 @@ final class StoreRecovery { void recoverFromStore(final IndexShard indexShard, ActionListener listener) { if (canRecover(indexShard)) { 
RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType(); - assert recoveryType == RecoverySource.Type.EMPTY_STORE - || recoveryType == RecoverySource.Type.EXISTING_STORE : "expected store recovery type but was: " + recoveryType; + assert recoveryType == RecoverySource.Type.EMPTY_STORE || recoveryType == RecoverySource.Type.EXISTING_STORE + : "expected store recovery type but was: " + recoveryType; ActionListener.completeWith(recoveryListener(indexShard, listener), () -> { logger.debug("starting recovery from store ..."); internalRecoverFromStore(indexShard); @@ -116,9 +115,9 @@ void recoverFromStore(final IndexShard indexShard, ActionListener liste } void recoverFromLocalShards( - BiConsumer mappingUpdateConsumer, + Consumer mappingUpdateConsumer, IndexShard indexShard, - List shards, + final List shards, ActionListener listener ) { if (canRecover(indexShard)) { @@ -132,8 +131,8 @@ void recoverFromLocalShards( throw new IllegalArgumentException("can't add shards from more than one index"); } IndexMetadata sourceMetadata = shards.get(0).getIndexMetadata(); - for (ObjectObjectCursor mapping : sourceMetadata.getMappings()) { - mappingUpdateConsumer.accept(mapping.key, mapping.value); + if (sourceMetadata.mapping() != null) { + mappingUpdateConsumer.accept(sourceMetadata.mapping()); } indexShard.mapperService().merge(sourceMetadata, MapperService.MergeReason.MAPPING_RECOVERY); // now that the mapping is merged we can validate the index sort configuration. 
diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index d44fd07ccb2bb..2b47c5845a394 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -1095,9 +1095,8 @@ public RecoveryDiff recoveryDiff(MetadataSnapshot recoveryTargetSnapshot) { Collections.unmodifiableList(different), Collections.unmodifiableList(missing) ); - assert recoveryDiff.size() == this.metadata.size() - (metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) - ? 1 - : 0) : "some files are missing recoveryDiff size: [" + assert recoveryDiff.size() == this.metadata.size() - (metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) ? 1 : 0) + : "some files are missing recoveryDiff size: [" + recoveryDiff.size() + "] metadata size: [" + this.metadata.size() @@ -1598,27 +1597,16 @@ public void ensureIndexHasHistoryUUID() throws IOException { * commit on the replica will cause exception as the new last commit c3 will have recovery_translog_gen=1. The recovery * translog generation of a commit is calculated based on the current local checkpoint. The local checkpoint of c3 is 1 * while the local checkpoint of c2 is 2. - *

        - * 3. Commit without translog can be used in recovery. An old index, which was created before multiple-commits is introduced - * (v6.2), may not have a safe commit. If that index has a snapshotted commit without translog and an unsafe commit, - * the policy can consider the snapshotted commit as a safe commit for recovery even the commit does not have translog. */ - public void trimUnsafeCommits( - final long lastSyncedGlobalCheckpoint, - final long minRetainedTranslogGen, - final org.opensearch.Version indexVersionCreated - ) throws IOException { + public void trimUnsafeCommits(final Path translogPath) throws IOException { metadataLock.writeLock().lock(); try { final List existingCommits = DirectoryReader.listCommits(directory); - if (existingCommits.isEmpty()) { - throw new IllegalArgumentException("No index found to trim"); - } - final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1); - final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); - final IndexCommit startingIndexCommit; - // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint. 
- startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); + assert existingCommits.isEmpty() == false : "No index found to trim"; + final IndexCommit lastIndexCommit = existingCommits.get(existingCommits.size() - 1); + final String translogUUID = lastIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); + final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); + final IndexCommit startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) { throw new IllegalStateException( @@ -1629,7 +1617,7 @@ public void trimUnsafeCommits( + "]" ); } - if (startingIndexCommit.equals(lastIndexCommitCommit) == false) { + if (startingIndexCommit.equals(lastIndexCommit) == false) { try (IndexWriter writer = newAppendingIndexWriter(directory, startingIndexCommit)) { // this achieves two things: // - by committing a new commit based on the starting commit, it make sure the starting commit will be opened diff --git a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java index 9ba59264df7ad..3467a86c86c86 100644 --- a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java @@ -93,11 +93,7 @@ public static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVect static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request, LongSupplier nanoTimeSupplier) { final long startTime = nanoTimeSupplier.getAsLong(); - final TermVectorsResponse termVectorsResponse = new TermVectorsResponse( - indexShard.shardId().getIndex().getName(), - request.type(), - request.id() - ); + final TermVectorsResponse 
termVectorsResponse = new TermVectorsResponse(indexShard.shardId().getIndex().getName(), request.id()); final Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(request.id())); Fields termVectorsByField = null; @@ -110,7 +106,7 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ try ( Engine.GetResult get = indexShard.get( - new Engine.Get(request.realtime(), false, request.type(), request.id(), uidTerm).version(request.version()) + new Engine.Get(request.realtime(), false, request.id(), uidTerm).version(request.version()) .versionType(request.versionType()) ); Engine.Searcher searcher = indexShard.acquireSearcher("term_vector") @@ -238,7 +234,7 @@ private static Fields addGeneratedTermVectors( /* generate term vectors from fetched document fields */ String[] getFields = validFields.toArray(new String[validFields.size() + 1]); getFields[getFields.length - 1] = SourceFieldMapper.NAME; - GetResult getResult = indexShard.getService().get(get, request.id(), request.type(), getFields, null); + GetResult getResult = indexShard.getService().get(get, request.id(), getFields, null); Fields generatedTermVectors = generateTermVectors( indexShard, getResult.sourceAsMap(), @@ -329,7 +325,6 @@ private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVect ParsedDocument parsedDocument = parseDocument( indexShard, indexShard.shardId().getIndexName(), - request.type(), request.doc(), request.xContentType(), request.routing() @@ -389,15 +384,14 @@ public static String[] getValues(IndexableField[] fields) { private static ParsedDocument parseDocument( IndexShard indexShard, String index, - String type, BytesReference doc, XContentType xContentType, String routing ) { MapperService mapperService = indexShard.mapperService(); - DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type); + DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(); ParsedDocument parsedDocument = 
docMapper.getDocumentMapper() - .parse(new SourceToParse(index, type, "_id_for_tv_api", doc, xContentType, routing)); + .parse(new SourceToParse(index, "_id_for_tv_api", doc, xContentType, routing)); if (docMapper.getMapping() != null) { parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping()); } diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index ff16eb237a500..2586599d3ed59 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -33,9 +33,8 @@ package org.opensearch.index.translog; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; -import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; @@ -54,6 +53,9 @@ import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.MissingHistoryOperationsException; +import org.opensearch.index.mapper.IdFieldMapper; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.Uid; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.AbstractIndexShardComponent; import org.opensearch.index.shard.IndexShardComponent; @@ -199,10 +201,8 @@ public Translog( // // For this to happen we must have already copied the translog.ckp file into translog-gen.ckp so we first check if that // file exists. 
If not we don't even try to clean it up and wait until we fail creating it - assert Files.exists(nextTranslogFile) == false - || Files.size(nextTranslogFile) <= TranslogHeader.headerSizeInBytes(translogUUID) : "unexpected translog file: [" - + nextTranslogFile - + "]"; + assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogHeader.headerSizeInBytes(translogUUID) + : "unexpected translog file: [" + nextTranslogFile + "]"; if (Files.exists(currentCheckpointFile) // current checkpoint is already copied && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning logger.warn( @@ -399,7 +399,8 @@ private static boolean calledFromOutsideOrViaTragedyClose() { @Override public void close() throws IOException { - assert calledFromOutsideOrViaTragedyClose() : "Translog.close method is called from inside Translog, but not via closeOnTragicEvent method"; + assert calledFromOutsideOrViaTragedyClose() + : "Translog.close method is called from inside Translog, but not via closeOnTragicEvent method"; if (closed.compareAndSet(false, true)) { try (ReleasableLock lock = writeLock.acquire()) { try { @@ -439,11 +440,8 @@ public long getMinFileGeneration() { if (readers.isEmpty()) { return current.getGeneration(); } else { - assert readers.stream() - .map(TranslogReader::getGeneration) - .min(Long::compareTo) - .get() - .equals(readers.get(0).getGeneration()) : "the first translog isn't the one with the minimum generation:" + readers; + assert readers.stream().map(TranslogReader::getGeneration).min(Long::compareTo).get().equals(readers.get(0).getGeneration()) + : "the first translog isn't the one with the minimum generation:" + readers; return readers.get(0).getGeneration(); } } @@ -740,10 +738,8 @@ private Snapshot newMultiSnapshot(TranslogSnapshot[] snapshots) throws IOExcepti if (snapshots.length == 0) { onClose = () -> {}; } else { - assert Arrays.stream(snapshots) - .map(BaseTranslogReader::getGeneration) - .min(Long::compareTo) - 
.get() == snapshots[0].generation : "first reader generation of " + snapshots + " is not the smallest"; + assert Arrays.stream(snapshots).map(BaseTranslogReader::getGeneration).min(Long::compareTo).get() == snapshots[0].generation + : "first reader generation of " + snapshots + " is not the smallest"; onClose = acquireTranslogGenFromDeletionPolicy(snapshots[0].generation); } boolean success = false; @@ -759,8 +755,8 @@ private Snapshot newMultiSnapshot(TranslogSnapshot[] snapshots) throws IOExcepti } private Stream readersAboveMinSeqNo(long minSeqNo) { - assert readLock.isHeldByCurrentThread() - || writeLock.isHeldByCurrentThread() : "callers of readersAboveMinSeqNo must hold a lock: readLock [" + assert readLock.isHeldByCurrentThread() || writeLock.isHeldByCurrentThread() + : "callers of readersAboveMinSeqNo must hold a lock: readLock [" + readLock.isHeldByCurrentThread() + "], writeLock [" + readLock.isHeldByCurrentThread() @@ -1198,11 +1194,10 @@ public static class Index implements Operation { public static final int FORMAT_6_0 = 8; // since 6.0.0 public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; - public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; - + public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE; private final String id; private final long autoGeneratedIdTimestamp; - private final String type; private final long seqNo; private final long primaryTerm; private final long version; @@ -1213,7 +1208,10 @@ private Index(final StreamInput in) throws IOException { final int format = in.readVInt(); // SERIALIZATION_FORMAT assert format >= FORMAT_6_0 : "format was: " + format; id = in.readString(); - type = in.readString(); + if (format < FORMAT_NO_DOC_TYPE) { + in.readString(); + // can't assert that this is _doc because pre 2.0 indexes can have any name for a 
type + } source = in.readBytesReference(); routing = in.readOptionalString(); if (format < FORMAT_NO_PARENT) { @@ -1230,7 +1228,6 @@ private Index(final StreamInput in) throws IOException { public Index(Engine.Index index, Engine.IndexResult indexResult) { this.id = index.id(); - this.type = index.type(); this.source = index.source(); this.routing = index.routing(); this.seqNo = indexResult.getSeqNo(); @@ -1239,21 +1236,11 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) { this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } - public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) { - this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); + public Index(String id, long seqNo, long primaryTerm, byte[] source) { + this(id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); } - public Index( - String type, - String id, - long seqNo, - long primaryTerm, - long version, - byte[] source, - String routing, - long autoGeneratedIdTimestamp - ) { - this.type = type; + public Index(String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) { this.id = id; this.source = new BytesArray(source); this.seqNo = seqNo; @@ -1270,12 +1257,10 @@ public Type opType() { @Override public long estimateSize() { - return (2 * id.length()) + (2 * type.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 - * Long.BYTES); // timestamp, seq_no, primary_term, and version - } - - public String type() { - return this.type; + return (2 * id.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 * Long.BYTES); // timestamp, + // seq_no, + // primary_term, + // and version } public String id() { @@ -1310,10 +1295,12 @@ public Source getSource() { } private void write(final StreamOutput out) throws IOException { - final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? 
SERIALIZATION_FORMAT : FORMAT_6_0; + final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE; out.writeVInt(format); out.writeString(id); - out.writeString(type); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeBytesReference(source); out.writeOptionalString(routing); if (format < FORMAT_NO_PARENT) { @@ -1343,7 +1330,6 @@ public boolean equals(Object o) { || seqNo != index.seqNo || primaryTerm != index.primaryTerm || id.equals(index.id) == false - || type.equals(index.type) == false || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp || source.equals(index.source) == false) { return false; @@ -1358,7 +1344,6 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = id.hashCode(); - result = 31 * result + type.hashCode(); result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); @@ -1374,9 +1359,6 @@ public String toString() { + "id='" + id + '\'' - + ", type='" - + type - + '\'' + ", seqNo=" + seqNo + ", primaryTerm=" @@ -1399,10 +1381,10 @@ public static class Delete implements Operation { private static final int FORMAT_6_0 = 4; // 6.0 - * public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; - public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; + public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE; - private final String type, id; - private final Term uid; + private final String id; private final long seqNo; private final long primaryTerm; private final long version; @@ -1410,9 +1392,16 @@ public static class Delete implements Operation { private Delete(final StreamInput in) throws IOException { final int format = in.readVInt();// 
SERIALIZATION_FORMAT assert format >= FORMAT_6_0 : "format was: " + format; - type = in.readString(); + if (format < FORMAT_NO_DOC_TYPE) { + in.readString(); + // Can't assert that this is _doc because pre 2.0 indexes can have any name for a type + } id = in.readString(); - uid = new Term(in.readString(), in.readBytesRef()); + if (format < FORMAT_NO_DOC_TYPE) { + final String docType = in.readString(); + assert docType.equals(IdFieldMapper.NAME) : docType + " != " + IdFieldMapper.NAME; + in.readBytesRef(); // uid + } this.version = in.readLong(); if (format < FORMAT_NO_VERSION_TYPE) { in.readByte(); // versionType @@ -1422,18 +1411,16 @@ private Delete(final StreamInput in) throws IOException { } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); + this(delete.id(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ - public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) { - this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); + public Delete(String id, long seqNo, long primaryTerm) { + this(id, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) { - this.type = Objects.requireNonNull(type); + public Delete(String id, long seqNo, long primaryTerm, long version) { this.id = Objects.requireNonNull(id); - this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; @@ -1446,22 +1433,14 @@ public Type opType() { @Override public long estimateSize() { - return (id.length() * 2) + (type.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (type.length() * 2) - + (3 * Long.BYTES); // seq_no, primary_term, and version; - } - - public String type() { - return type; + return (id.length() * 2) + (3 * Long.BYTES); 
// seq_no, primary_term, + // and version; } public String id() { return id; } - public Term uid() { - return this.uid; - } - @Override public long seqNo() { return seqNo; @@ -1482,12 +1461,16 @@ public Source getSource() { } private void write(final StreamOutput out) throws IOException { - final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0; + final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE; out.writeVInt(format); - out.writeString(type); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); - out.writeString(uid.field()); - out.writeBytesRef(uid.bytes()); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(IdFieldMapper.NAME); + out.writeBytesRef(Uid.encodeId(id)); + } out.writeLong(version); if (format < FORMAT_NO_VERSION_TYPE) { out.writeByte(VersionType.EXTERNAL.getValue()); @@ -1507,13 +1490,12 @@ public boolean equals(Object o) { Delete delete = (Delete) o; - return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm && uid.equals(delete.uid); + return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm; } @Override public int hashCode() { - int result = uid.hashCode(); - result = 31 * result + Long.hashCode(seqNo); + int result = Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); return result; @@ -1521,7 +1503,7 @@ public int hashCode() { @Override public String toString() { - return "Delete{" + "uid=" + uid + ", seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + '}'; + return "Delete{" + "seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + '}'; } } @@ -1806,8 +1788,8 @@ public void trimUnreferencedReaders() throws IOException { current.sync(); deleteReaderFiles(reader); } - assert 
readers.isEmpty() == false - || current.generation == minReferencedGen : "all readers were cleaned but the minReferenceGen [" + assert readers.isEmpty() == false || current.generation == minReferencedGen + : "all readers were cleaned but the minReferenceGen [" + minReferencedGen + "] is not the current writer's gen [" + current.generation diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java index 37c1d5d698408..66241f7b6847f 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java @@ -283,7 +283,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc final Translog.Index o1 = (Translog.Index) prvOp; final Translog.Index o2 = (Translog.Index) newOp; sameOp = Objects.equals(o1.id(), o2.id()) - && Objects.equals(o1.type(), o2.type()) && Objects.equals(o1.source(), o2.source()) && Objects.equals(o1.routing(), o2.routing()) && o1.primaryTerm() == o2.primaryTerm() @@ -293,7 +292,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc final Translog.Delete o1 = (Translog.Delete) newOp; final Translog.Delete o2 = (Translog.Delete) prvOp; sameOp = Objects.equals(o1.id(), o2.id()) - && Objects.equals(o1.type(), o2.type()) && o1.primaryTerm() == o2.primaryTerm() && o1.seqNo() == o2.seqNo() && o1.version() == o2.version(); diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 0fa57bd3fb33f..5caafb0ce60d4 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -849,14 +849,13 @@ public IndexShard createShard( RecoveryState recoveryState = indexService.createRecoveryState(shardRouting, targetNode, sourceNode); IndexShard 
indexShard = indexService.createShard(shardRouting, globalCheckpointSyncer, retentionLeaseSyncer); indexShard.addShardFailureCallback(onShardFailure); - indexShard.startRecovery(recoveryState, recoveryTargetService, recoveryListener, repositoriesService, (type, mapping) -> { - assert recoveryState.getRecoverySource() - .getType() == RecoverySource.Type.LOCAL_SHARDS : "mapping update consumer only required by local shards recovery"; + indexShard.startRecovery(recoveryState, recoveryTargetService, recoveryListener, repositoriesService, mapping -> { + assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS + : "mapping update consumer only required by local shards recovery"; client.admin() .indices() .preparePutMapping() .setConcreteIndex(shardRouting.index()) // concrete index - no name clash, it uses uuid - .setType(type) .setSource(mapping.source().string(), XContentType.JSON) .get(); }, this); @@ -1633,7 +1632,21 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set PARSER = new ConstructingObjectParser<>("terms_lookup", args -> { String index = (String) args[0]; - String type = (String) args[1]; - String id = (String) args[2]; - String path = (String) args[3]; - return new TermsLookup(index, type, id, path); + String id = (String) args[1]; + String path = (String) args[2]; + return new TermsLookup(index, id, path); }); static { PARSER.declareString(constructorArg(), new ParseField("index")); - PARSER.declareString(optionalConstructorArg(), new ParseField("type").withAllDeprecated()); PARSER.declareString(constructorArg(), new ParseField("id")); PARSER.declareString(constructorArg(), new ParseField("path")); PARSER.declareString(TermsLookup::routing, new ParseField("routing")); @@ -170,19 +138,12 @@ public static TermsLookup parseTermsLookup(XContentParser parser) throws IOExcep @Override public String toString() { - if (type == null) { - return index + "/" + id + "/" + path; - } else { - return index + "/" + 
type + "/" + id + "/" + path; - } + return index + "/" + id + "/" + path; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("index", index); - if (type != null) { - builder.field("type", type); - } builder.field("id", id); builder.field("path", path); if (routing != null) { @@ -193,7 +154,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(index, type, id, path, routing); + return Objects.hash(index, id, path, routing); } @Override @@ -206,7 +167,6 @@ public boolean equals(Object obj) { } TermsLookup other = (TermsLookup) obj; return Objects.equals(index, other.index) - && Objects.equals(type, other.type) && Objects.equals(id, other.id) && Objects.equals(path, other.path) && Objects.equals(routing, other.routing); diff --git a/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java index 9031230092a97..9463b51ca3792 100644 --- a/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java @@ -636,13 +636,12 @@ private void updateShard( ClusterState clusterState ) { final ShardRouting currentRoutingEntry = shard.routingEntry(); - assert currentRoutingEntry.isSameAllocation( - shardRouting - ) : "local shard has a different allocation id but wasn't cleaned by removeShards. " - + "cluster state: " - + shardRouting - + " local: " - + currentRoutingEntry; + assert currentRoutingEntry.isSameAllocation(shardRouting) + : "local shard has a different allocation id but wasn't cleaned by removeShards. 
" + + "cluster state: " + + shardRouting + + " local: " + + currentRoutingEntry; final long primaryTerm; try { diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java index a21ae475ba08a..5e9db2e1d67f3 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java @@ -260,8 +260,8 @@ synchronized void remove(IndexShard shard, RecoverySourceHandler handler) { if (removed != null) { shard.recoveryStats().decCurrentAsSource(); removed.cancel(); - assert nodeToHandlers.getOrDefault(removed.targetNode(), Collections.emptySet()) - .contains(removed) : "Remote recovery was not properly tracked [" + removed + "]"; + assert nodeToHandlers.getOrDefault(removed.targetNode(), Collections.emptySet()).contains(removed) + : "Remote recovery was not properly tracked [" + removed + "]"; nodeToHandlers.computeIfPresent(removed.targetNode(), (k, handlersForNode) -> { handlersForNode.remove(removed); if (handlersForNode.isEmpty()) { diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 8c959ab02b7b3..684c401716883 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -222,7 +222,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); return; } - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); timer = recoveryTarget.state().getTimer(); cancellableThreads = 
recoveryTarget.cancellableThreads(); if (preExistingRequest == null) { @@ -233,12 +233,8 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("{} preparing shard for peer recovery", recoveryTarget.shardId()); indexShard.prepareForIndexRecovery(); final long startingSeqNo = indexShard.recoverLocallyUpToGlobalCheckpoint(); - assert startingSeqNo == UNASSIGNED_SEQ_NO - || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG : "unexpected recovery stage [" - + recoveryTarget.state().getStage() - + "] starting seqno [ " - + startingSeqNo - + "]"; + assert startingSeqNo == UNASSIGNED_SEQ_NO || recoveryTarget.state().getStage() == RecoveryState.Stage.TRANSLOG + : "unexpected recovery stage [" + recoveryTarget.state().getStage() + "] starting seqno [ " + startingSeqNo + "]"; startRequest = getStartRecoveryRequest(logger, clusterService.localNode(), recoveryTarget, startingSeqNo); requestToSend = startRequest; actionName = PeerRecoverySourceService.Actions.START_RECOVERY; @@ -367,7 +363,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, return; } - recoveryRef.target().prepareForTranslogOperations(request.totalTranslogOps(), listener); + recoveryRef.get().prepareForTranslogOperations(request.totalTranslogOps(), listener); } } } @@ -382,7 +378,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh return; } - recoveryRef.target().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); + recoveryRef.get().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); } } } @@ -393,7 +389,7 @@ class HandoffPrimaryContextRequestHandler implements TransportRequestHandler listener = createOrFinishListener( recoveryRef, channel, @@ -427,7 +423,7 @@ private void performTranslogOps( final ActionListener listener, final RecoveryRef recoveryRef ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final 
RecoveryTarget recoveryTarget = recoveryRef.get(); final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); final Consumer retryOnMappingException = exception -> { @@ -469,18 +465,15 @@ public void onTimeout(TimeValue timeout) { request.maxSeqNoOfUpdatesOrDeletesOnPrimary(), request.retentionLeases(), request.mappingVersionOnPrimary(), - ActionListener.wrap( - checkpoint -> listener.onResponse(null), - e -> { - // do not retry if the mapping on replica is at least as recent as the mapping - // that the primary used to index the operations in the request. - if (mappingVersionOnTarget < request.mappingVersionOnPrimary() && e instanceof MapperException) { - retryOnMappingException.accept(e); - } else { - listener.onFailure(e); - } + ActionListener.wrap(checkpoint -> listener.onResponse(null), e -> { + // do not retry if the mapping on replica is at least as recent as the mapping + // that the primary used to index the operations in the request. 
+ if (mappingVersionOnTarget < request.mappingVersionOnPrimary() && e instanceof MapperException) { + retryOnMappingException.accept(e); + } else { + listener.onFailure(e); } - ) + }) ); } } @@ -495,7 +488,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c return; } - recoveryRef.target() + recoveryRef.get() .receiveFileInfo( request.phase1FileNames, request.phase1FileSizes, @@ -518,7 +511,7 @@ public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel return; } - recoveryRef.target() + recoveryRef.get() .cleanFiles(request.totalTranslogOps(), request.getGlobalCheckpoint(), request.sourceMetaSnapshot(), listener); } } @@ -532,7 +525,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler listener = createOrFinishListener(recoveryRef, channel, Actions.FILE_CHUNK, request); if (listener == null) { return; @@ -582,7 +575,7 @@ private ActionListener createOrFinishListener( final RecoveryTransportRequest request, final CheckedFunction responseFn ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); final ActionListener channelListener = new ChannelActionListener<>(channel, action, request); final ActionListener voidListener = ActionListener.map(channelListener, responseFn); @@ -618,7 +611,7 @@ public void onFailure(Exception e) { logger.error(() -> new ParameterizedMessage("unexpected error during recovery [{}], failing shard", recoveryId), e); onGoingRecoveries.failRecovery( recoveryId, - new RecoveryFailedException(recoveryRef.target().state(), "unexpected error", e), + new RecoveryFailedException(recoveryRef.get().state(), "unexpected error", e), true // be safe ); } else { diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java index 0fa2bc29c09fc..3c197a8e33eb6 100644 --- 
a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java @@ -36,6 +36,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.concurrent.GatedAutoCloseable; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; @@ -48,7 +49,6 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; /** * This class holds a collection of all on going recoveries on the current node (i.e., the node is the target node @@ -178,7 +178,7 @@ public RecoveryRef getRecoverySafe(long id, ShardId shardId) { if (recoveryRef == null) { throw new IndexShardClosedException(shardId); } - assert recoveryRef.target().shardId().equals(shardId); + assert recoveryRef.get().shardId().equals(shardId); return recoveryRef; } @@ -273,29 +273,15 @@ public boolean cancelRecoveriesForShard(ShardId shardId, String reason) { * causes {@link RecoveryTarget#decRef()} to be called. This makes sure that the underlying resources * will not be freed until {@link RecoveryRef#close()} is called. 
*/ - public static class RecoveryRef implements AutoCloseable { - - private final RecoveryTarget status; - private final AtomicBoolean closed = new AtomicBoolean(false); + public static class RecoveryRef extends GatedAutoCloseable { /** * Important: {@link RecoveryTarget#tryIncRef()} should * be *successfully* called on status before */ public RecoveryRef(RecoveryTarget status) { - this.status = status; - this.status.setLastAccessTime(); - } - - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - status.decRef(); - } - } - - public RecoveryTarget target() { - return status; + super(status, status::decRef); + status.setLastAccessTime(); } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index 1bd659853e10e..77596f50a8a5e 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -57,6 +57,7 @@ import org.opensearch.common.StopWatch; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -64,11 +65,10 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; -import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.FutureUtils; import org.opensearch.common.util.concurrent.ListenableFuture; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.RecoveryEngineException; import 
org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.seqno.RetentionLease; @@ -132,6 +132,7 @@ public class RecoverySourceHandler { private final CancellableThreads cancellableThreads = new CancellableThreads(); private final List resources = new CopyOnWriteArrayList<>(); private final ListenableFuture future = new ListenableFuture<>(); + public static final String PEER_RECOVERY_NAME = "peer-recovery"; public RecoverySourceHandler( IndexShard shard, @@ -187,7 +188,6 @@ public void recoverToTarget(ActionListener listener) { IOUtils.closeWhileHandlingException(releaseResources, () -> future.onFailure(e)); }; - final boolean softDeletesEnabled = shard.indexSettings().isSoftDeleteEnabled(); final SetOnce retentionLeaseRef = new SetOnce<>(); runUnderPrimaryPermit(() -> { @@ -211,19 +211,13 @@ public void recoverToTarget(ActionListener listener) { cancellableThreads, logger ); - final Engine.HistorySource historySource; - if (softDeletesEnabled && (shard.useRetentionLeasesInPeerRecovery() || retentionLeaseRef.get() != null)) { - historySource = Engine.HistorySource.INDEX; - } else { - historySource = Engine.HistorySource.TRANSLOG; - } - final Closeable retentionLock = shard.acquireHistoryRetentionLock(historySource); + final Closeable retentionLock = shard.acquireHistoryRetentionLock(); resources.add(retentionLock); final long startingSeqNo; final boolean isSequenceNumberBasedRecovery = request.startingSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && isTargetSameHistory() - && shard.hasCompleteHistoryOperations("peer-recovery", historySource, request.startingSeqNo()) - && (historySource == Engine.HistorySource.TRANSLOG + && shard.hasCompleteHistoryOperations(PEER_RECOVERY_NAME, request.startingSeqNo()) + && ((retentionLeaseRef.get() == null && shard.useRetentionLeasesInPeerRecovery() == false) || (retentionLeaseRef.get() != null && retentionLeaseRef.get().retainingSequenceNumber() <= request.startingSeqNo())); // NB check hasCompleteHistoryOperations 
when computing isSequenceNumberBasedRecovery, even if there is a retention lease, // because when doing a rolling upgrade from earlier than 7.4 we may create some leases that are initially unsatisfied. It's @@ -231,7 +225,7 @@ && isTargetSameHistory() // Also it's pretty cheap when soft deletes are enabled, and it'd be a disaster if we tried a sequence-number-based recovery // without having a complete history. - if (isSequenceNumberBasedRecovery && softDeletesEnabled && retentionLeaseRef.get() != null) { + if (isSequenceNumberBasedRecovery && retentionLeaseRef.get() != null) { // all the history we need is retained by an existing retention lease, so we do not need a separate retention lock retentionLock.close(); logger.trace("history is retained by {}", retentionLeaseRef.get()); @@ -256,10 +250,10 @@ && isTargetSameHistory() sendFileStep.onResponse(SendFileResult.EMPTY); } } else { - final Engine.IndexCommitRef safeCommitRef; + final GatedCloseable wrappedSafeCommit; try { - safeCommitRef = acquireSafeCommit(shard); - resources.add(safeCommitRef); + wrappedSafeCommit = acquireSafeCommit(shard); + resources.add(wrappedSafeCommit); } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "snapshot failed", e); } @@ -274,18 +268,16 @@ && isTargetSameHistory() // advances and not when creating a new safe commit. In any case this is a best-effort thing since future recoveries can // always fall back to file-based ones, and only really presents a problem if this primary fails before things have settled // down. - startingSeqNo = softDeletesEnabled - ? 
Long.parseLong(safeCommitRef.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L - : 0; + startingSeqNo = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { - final int estimateNumOps = shard.estimateNumberOfHistoryOperations("peer-recovery", historySource, startingSeqNo); + final int estimateNumOps = countNumberOfHistoryOperations(startingSeqNo); final Releasable releaseStore = acquireStore(shard.store()); resources.add(releaseStore); - sendFileStep.whenComplete(r -> IOUtils.close(safeCommitRef, releaseStore), e -> { + sendFileStep.whenComplete(r -> IOUtils.close(wrappedSafeCommit, releaseStore), e -> { try { - IOUtils.close(safeCommitRef, releaseStore); + IOUtils.close(wrappedSafeCommit, releaseStore); } catch (final IOException ex) { logger.warn("releasing snapshot caused exception", ex); } @@ -315,7 +307,7 @@ && isTargetSameHistory() deleteRetentionLeaseStep.whenComplete(ignored -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + "[phase1]"); - phase1(safeCommitRef.getIndexCommit(), startingSeqNo, () -> estimateNumOps, sendFileStep); + phase1(wrappedSafeCommit.get(), startingSeqNo, () -> estimateNumOps, sendFileStep); }, onFailure); } catch (final Exception e) { @@ -327,10 +319,7 @@ && isTargetSameHistory() sendFileStep.whenComplete(r -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + "[prepareTargetForTranslog]"); // For a sequence based recovery, the target can keep its local translog - prepareTargetForTranslog( - shard.estimateNumberOfHistoryOperations("peer-recovery", historySource, startingSeqNo), - prepareEngineStep - ); + prepareTargetForTranslog(countNumberOfHistoryOperations(startingSeqNo), prepareEngineStep); }, onFailure); prepareEngineStep.whenComplete(prepareEngineTime -> { @@ -350,11 +339,16 @@ && 
isTargetSameHistory() ); final long endingSeqNo = shard.seqNoStats().getMaxSeqNo(); - logger.trace( - "snapshot translog for recovery; current size is [{}]", - shard.estimateNumberOfHistoryOperations("peer-recovery", historySource, startingSeqNo) + if (logger.isTraceEnabled()) { + logger.trace("snapshot translog for recovery; current size is [{}]", countNumberOfHistoryOperations(startingSeqNo)); + } + final Translog.Snapshot phase2Snapshot = shard.newChangesSnapshot( + PEER_RECOVERY_NAME, + startingSeqNo, + Long.MAX_VALUE, + false, + true ); - final Translog.Snapshot phase2Snapshot = shard.getHistoryOperations("peer-recovery", historySource, startingSeqNo); resources.add(phase2Snapshot); retentionLock.close(); @@ -415,6 +409,15 @@ private boolean isTargetSameHistory() { return targetHistoryUUID.equals(shard.getHistoryUUID()); } + /** + * Counts the number of history operations from the starting sequence number + * @param startingSeqNo the starting sequence number to count; included + * @return number of history operations + */ + private int countNumberOfHistoryOperations(long startingSeqNo) throws IOException { + return shard.countNumberOfHistoryOperations(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE); + } + static void runUnderPrimaryPermit( CancellableThreads.Interruptible runnable, String reason, @@ -473,12 +476,12 @@ private Releasable acquireStore(Store store) { * with the file systems due to interrupt (see {@link org.apache.lucene.store.NIOFSDirectory} javadocs for more detail). * This method acquires a safe commit and wraps it to make sure that it will be released using the generic thread pool. 
*/ - private Engine.IndexCommitRef acquireSafeCommit(IndexShard shard) { - final Engine.IndexCommitRef commitRef = shard.acquireSafeIndexCommit(); + private GatedCloseable acquireSafeCommit(IndexShard shard) { + final GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit(); final AtomicBoolean closed = new AtomicBoolean(false); - return new Engine.IndexCommitRef(commitRef.getIndexCommit(), () -> { + return new GatedCloseable<>(wrappedSafeCommit.get(), () -> { if (closed.compareAndSet(false, true)) { - runWithGenericThreadPool(commitRef::close); + runWithGenericThreadPool(wrappedSafeCommit::close); } }); } @@ -644,10 +647,8 @@ void phase1(IndexCommit snapshot, long startingSeqNo, IntSupplier translogOps, A createRetentionLeaseStep.whenComplete(retentionLease -> { final long lastKnownGlobalCheckpoint = shard.getLastKnownGlobalCheckpoint(); - assert retentionLease == null - || retentionLease.retainingSequenceNumber() - 1 <= lastKnownGlobalCheckpoint : retentionLease - + " vs " - + lastKnownGlobalCheckpoint; + assert retentionLease == null || retentionLease.retainingSequenceNumber() - 1 <= lastKnownGlobalCheckpoint + : retentionLease + " vs " + lastKnownGlobalCheckpoint; // Establishes new empty translog on the replica with global checkpoint set to lastKnownGlobalCheckpoint. We want // the commit we just copied to be a safe commit on the replica, so why not set the global checkpoint on the replica // to the max seqno of this commit? 
Because (in rare corner cases) this commit might not be a safe commit here on diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java index aa1f3cf858587..d89d59e2f2c1b 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java @@ -138,11 +138,8 @@ public RecoveryState(ShardRouting shardRouting, DiscoveryNode targetNode, @Nulla public RecoveryState(ShardRouting shardRouting, DiscoveryNode targetNode, @Nullable DiscoveryNode sourceNode, Index index) { assert shardRouting.initializing() : "only allow initializing shard routing to be recovered: " + shardRouting; RecoverySource recoverySource = shardRouting.recoverySource(); - assert (recoverySource - .getType() == RecoverySource.Type.PEER) == (sourceNode != null) : "peer recovery requires source node, recovery type: " - + recoverySource.getType() - + " source node: " - + sourceNode; + assert (recoverySource.getType() == RecoverySource.Type.PEER) == (sourceNode != null) + : "peer recovery requires source node, recovery type: " + recoverySource.getType() + " source node: " + sourceNode; this.shardId = shardRouting.shardId(); this.primary = shardRouting.primary(); this.recoverySource = recoverySource; diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java index b4bcec3273379..394b093059385 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java @@ -344,10 +344,10 @@ public void finalizeRecovery(final long globalCheckpoint, final long trimAboveSe private boolean hasUncommittedOperations() throws IOException { long localCheckpointOfCommit = 
Long.parseLong(indexShard.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); - return indexShard.estimateNumberOfHistoryOperations( - "peer-recovery", - indexShard.indexSettings().isSoftDeleteEnabled() ? Engine.HistorySource.INDEX : Engine.HistorySource.TRANSLOG, - localCheckpointOfCommit + 1 + return indexShard.countNumberOfHistoryOperations( + RecoverySourceHandler.PEER_RECOVERY_NAME, + localCheckpointOfCommit + 1, + Long.MAX_VALUE ) > 0; } diff --git a/server/src/main/java/org/opensearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/opensearch/indices/recovery/StartRecoveryRequest.java index dd4d7d99e978c..f2efae2fbe798 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/StartRecoveryRequest.java +++ b/server/src/main/java/org/opensearch/indices/recovery/StartRecoveryRequest.java @@ -98,8 +98,8 @@ public StartRecoveryRequest( this.metadataSnapshot = metadataSnapshot; this.primaryRelocation = primaryRelocation; this.startingSeqNo = startingSeqNo; - assert startingSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO - || metadataSnapshot.getHistoryUUID() != null : "starting seq no is set but not history uuid"; + assert startingSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || metadataSnapshot.getHistoryUUID() != null + : "starting seq no is set but not history uuid"; } public long recoveryId() { diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index 820763dde43cb..b496799c34dd0 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -76,19 +76,10 @@ public final class IngestDocument { // Contains all pipelines that have been executed for this document private final Set executedPipelines = new LinkedHashSet<>(); - public IngestDocument( - String index, - String type, - String id, - String routing, - Long version, - VersionType versionType, 
- Map source - ) { + public IngestDocument(String index, String id, String routing, Long version, VersionType versionType, Map source) { this.sourceAndMetadata = new HashMap<>(); this.sourceAndMetadata.putAll(source); this.sourceAndMetadata.put(Metadata.INDEX.getFieldName(), index); - this.sourceAndMetadata.put(Metadata.TYPE.getFieldName(), type); this.sourceAndMetadata.put(Metadata.ID.getFieldName(), id); if (routing != null) { this.sourceAndMetadata.put(Metadata.ROUTING.getFieldName(), routing); diff --git a/server/src/main/java/org/opensearch/ingest/IngestService.java b/server/src/main/java/org/opensearch/ingest/IngestService.java index f50d437c26fdb..cbd5fa71b27de 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestService.java +++ b/server/src/main/java/org/opensearch/ingest/IngestService.java @@ -722,13 +722,12 @@ private void innerExecute( // (e.g. the pipeline may have been removed while we're ingesting a document totalMetrics.preIngest(); String index = indexRequest.index(); - String type = indexRequest.type(); String id = indexRequest.id(); String routing = indexRequest.routing(); Long version = indexRequest.version(); VersionType versionType = indexRequest.versionType(); Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, version, versionType, sourceAsMap); + IngestDocument ingestDocument = new IngestDocument(index, id, routing, version, versionType, sourceAsMap); ingestDocument.executePipeline(pipeline, (result, e) -> { long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos); totalMetrics.postIngest(ingestTimeInMillis); @@ -743,7 +742,6 @@ private void innerExecute( // it's fine to set all metadata fields all the time, as ingest document holds their starting values // before ingestion, which might also get modified during ingestion. 
indexRequest.index((String) metadataMap.get(IngestDocument.Metadata.INDEX)); - indexRequest.type((String) metadataMap.get(IngestDocument.Metadata.TYPE)); indexRequest.id((String) metadataMap.get(IngestDocument.Metadata.ID)); indexRequest.routing((String) metadataMap.get(IngestDocument.Metadata.ROUTING)); indexRequest.version(((Number) metadataMap.get(IngestDocument.Metadata.VERSION)).longValue()); diff --git a/server/src/main/java/org/opensearch/ingest/TrackingResultProcessor.java b/server/src/main/java/org/opensearch/ingest/TrackingResultProcessor.java index e9920b1332a18..efcb3b8d9840f 100644 --- a/server/src/main/java/org/opensearch/ingest/TrackingResultProcessor.java +++ b/server/src/main/java/org/opensearch/ingest/TrackingResultProcessor.java @@ -100,59 +100,56 @@ public void execute(IngestDocument ingestDocument, BiConsumer { - // special handling for pipeline cycle errors - if (e instanceof OpenSearchException - && e.getCause() instanceof IllegalStateException - && e.getCause().getMessage().startsWith(PIPELINE_CYCLE_ERROR_MESSAGE)) { - if (ignoreFailure) { - processorResultList.add( - new SimulateProcessorResult( - pipelineProcessor.getType(), - pipelineProcessor.getTag(), - pipelineProcessor.getDescription(), - new IngestDocument(ingestDocument), - e, - conditionalWithResult - ) - ); - } else { - processorResultList.add( - new SimulateProcessorResult( - pipelineProcessor.getType(), - pipelineProcessor.getTag(), - pipelineProcessor.getDescription(), - e, - conditionalWithResult - ) - ); - } - handler.accept(null, e); - } else { - // now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and - // execute it - CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), null, processorResultList); - // add the pipeline process to the results + ingestDocumentCopy.executePipeline(pipelineToCall, (result, e) -> { + // special handling for pipeline cycle errors + if (e instanceof 
OpenSearchException + && e.getCause() instanceof IllegalStateException + && e.getCause().getMessage().startsWith(PIPELINE_CYCLE_ERROR_MESSAGE)) { + if (ignoreFailure) { processorResultList.add( new SimulateProcessorResult( - actualProcessor.getType(), - actualProcessor.getTag(), - actualProcessor.getDescription(), + pipelineProcessor.getType(), + pipelineProcessor.getTag(), + pipelineProcessor.getDescription(), + new IngestDocument(ingestDocument), + e, conditionalWithResult ) ); - Pipeline verbosePipeline = new Pipeline( - pipeline.getId(), - pipeline.getDescription(), - pipeline.getVersion(), - verbosePipelineProcessor + } else { + processorResultList.add( + new SimulateProcessorResult( + pipelineProcessor.getType(), + pipelineProcessor.getTag(), + pipelineProcessor.getDescription(), + e, + conditionalWithResult + ) ); - ingestDocument.executePipeline(verbosePipeline, handler); } + handler.accept(null, e); + } else { + // now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and + // execute it + CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), null, processorResultList); + // add the pipeline process to the results + processorResultList.add( + new SimulateProcessorResult( + actualProcessor.getType(), + actualProcessor.getTag(), + actualProcessor.getDescription(), + conditionalWithResult + ) + ); + Pipeline verbosePipeline = new Pipeline( + pipeline.getId(), + pipeline.getDescription(), + pipeline.getVersion(), + verbosePipelineProcessor + ); + ingestDocument.executePipeline(verbosePipeline, handler); } - ); + }); return; } diff --git a/server/src/main/java/org/opensearch/monitor/os/OsProbe.java b/server/src/main/java/org/opensearch/monitor/os/OsProbe.java index 5097d1b0f4c05..eda86c49539dd 100644 --- a/server/src/main/java/org/opensearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/opensearch/monitor/os/OsProbe.java @@ -51,6 +51,7 @@ import java.util.List; import 
java.util.Map; import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -401,6 +402,7 @@ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup long numberOfPeriods = -1; long numberOfTimesThrottled = -1; long timeThrottledNanos = -1; + for (final String line : lines) { final String[] fields = line.split("\\s+"); switch (fields[0]) { @@ -415,9 +417,17 @@ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup break; } } - assert numberOfPeriods != -1; - assert numberOfTimesThrottled != -1; - assert timeThrottledNanos != -1; + if (isCpuStatWarningsLogged.getAndSet(true) == false) { + if (numberOfPeriods == -1) { + logger.warn("Expected to see nr_periods filed but found nothing"); + } + if (numberOfTimesThrottled == -1) { + logger.warn("Expected to see nr_throttled filed but found nothing"); + } + if (timeThrottledNanos == -1) { + logger.warn("Expected to see throttled_time filed but found nothing"); + } + } return new OsStats.Cgroup.CpuStat(numberOfPeriods, numberOfTimesThrottled, timeThrottledNanos); } @@ -440,7 +450,7 @@ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu") List readSysFsCgroupCpuAcctCpuStat(final String controlGroup) throws IOException { final List lines = Files.readAllLines(PathUtils.get("/sys/fs/cgroup/cpu", controlGroup, "cpu.stat")); - assert lines != null && lines.size() == 3; + assert lines != null && lines.isEmpty() == false; return lines; } @@ -588,11 +598,18 @@ public static OsProbe getInstance() { return OsProbeHolder.INSTANCE; } - OsProbe() { + private final Logger logger; + + private AtomicBoolean isCpuStatWarningsLogged = new AtomicBoolean(false); + OsProbe() { + this(LogManager.getLogger(OsProbe.class)); } - private final Logger logger = LogManager.getLogger(getClass()); + /*For 
testing purpose*/ + OsProbe(final Logger logger) { + this.logger = logger; + } OsInfo osInfo(long refreshInterval, int allocatedProcessors) throws IOException { return new OsInfo( diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index ae2872789819f..060f5b23fb504 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -1059,8 +1059,8 @@ public Node start() throws NodeValidationException { transportService.getTaskManager().setTaskCancellationService(new TaskCancellationService(transportService)); transportService.start(); assert localNodeFactory.getNode() != null; - assert transportService.getLocalNode() - .equals(localNodeFactory.getNode()) : "transportService has a different local node than the factory provided"; + assert transportService.getLocalNode().equals(localNodeFactory.getNode()) + : "transportService has a different local node than the factory provided"; injector.getInstance(PeerRecoverySourceService.class).start(); // Load (and maybe upgrade) the metadata stored on disk @@ -1103,8 +1103,8 @@ public Node start() throws NodeValidationException { // start after transport service so the local disco is known discovery.start(); // start before cluster service so that it can set initial state on ClusterApplierService clusterService.start(); - assert clusterService.localNode() - .equals(localNodeFactory.getNode()) : "clusterService has a different local node than the factory provided"; + assert clusterService.localNode().equals(localNodeFactory.getNode()) + : "clusterService has a different local node than the factory provided"; transportService.acceptIncomingRequests(); discovery.startInitialJoin(); final TimeValue initialStateTimeout = DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings()); diff --git a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java 
b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java index ee285e8be8c2f..4c3a07d7b98d9 100644 --- a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java @@ -34,6 +34,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; @@ -63,11 +64,26 @@ public interface EnginePlugin { * to determine if a custom {@link CodecService} should be provided for the given index. A plugin that is not overriding * the {@link CodecService} through the plugin can ignore this method and the Codec specified in the {@link IndexSettings} * will be used. + * + * @deprecated Please use {@code getCustomCodecServiceFactory()} instead as it provides more context for {@link CodecService} + * instance construction. */ + @Deprecated default Optional getCustomCodecService(IndexSettings indexSettings) { return Optional.empty(); } + /** + * EXPERT: + * When an index is created this method is invoked for each engine plugin. Engine plugins can inspect the index settings + * to determine if a custom {@link CodecServiceFactory} should be provided for the given index. A plugin that is not overriding + * the {@link CodecServiceFactory} through the plugin can ignore this method and the default Codec specified in the + * {@link IndexSettings} will be used. + */ + default Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.empty(); + } + /** * When an index is created this method is invoked for each engine plugin. 
Engine plugins that need to provide a * custom {@link TranslogDeletionPolicy} can override this method to return a function that takes the {@link IndexSettings} diff --git a/server/src/main/java/org/opensearch/repositories/IndexMetaDataGenerations.java b/server/src/main/java/org/opensearch/repositories/IndexMetaDataGenerations.java index 5adc86bc84679..4ea0217c5870f 100644 --- a/server/src/main/java/org/opensearch/repositories/IndexMetaDataGenerations.java +++ b/server/src/main/java/org/opensearch/repositories/IndexMetaDataGenerations.java @@ -66,11 +66,8 @@ public final class IndexMetaDataGenerations { final Map identifiers; IndexMetaDataGenerations(Map> lookup, Map identifiers) { - assert identifiers.keySet() - .equals(lookup.values().stream().flatMap(m -> m.values().stream()).collect(Collectors.toSet())) : "identifier mappings " - + identifiers - + " don't track the same blob ids as the lookup map " - + lookup; + assert identifiers.keySet().equals(lookup.values().stream().flatMap(m -> m.values().stream()).collect(Collectors.toSet())) + : "identifier mappings " + identifiers + " don't track the same blob ids as the lookup map " + lookup; assert lookup.values().stream().noneMatch(Map::isEmpty) : "Lookup contained empty map [" + lookup + "]"; this.lookup = Collections.unmodifiableMap(lookup); this.identifiers = Collections.unmodifiableMap(identifiers); diff --git a/server/src/main/java/org/opensearch/repositories/RepositoryData.java b/server/src/main/java/org/opensearch/repositories/RepositoryData.java index 7b085c961ba23..7857df512cd75 100644 --- a/server/src/main/java/org/opensearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/opensearch/repositories/RepositoryData.java @@ -168,11 +168,8 @@ private RepositoryData( + shardGenerations.indices() + " but snapshots only reference indices " + indices.values(); - assert indexSnapshots.values() - .stream() - .noneMatch( - snapshotIdList -> new HashSet<>(snapshotIdList).size() != snapshotIdList.size() - 
) : "Found duplicate snapshot ids per index in [" + indexSnapshots + "]"; + assert indexSnapshots.values().stream().noneMatch(snapshotIdList -> new HashSet<>(snapshotIdList).size() != snapshotIdList.size()) + : "Found duplicate snapshot ids per index in [" + indexSnapshots + "]"; } protected RepositoryData copy() { @@ -355,8 +352,8 @@ public RepositoryData addSnapshot( + "]"; newIndexMetaGenerations = IndexMetaDataGenerations.EMPTY; } else { - assert indexMetaBlobs.isEmpty() - || shardGenerations.indices().equals(indexMetaBlobs.keySet()) : "Shard generations contained indices " + assert indexMetaBlobs.isEmpty() || shardGenerations.indices().equals(indexMetaBlobs.keySet()) + : "Shard generations contained indices " + shardGenerations.indices() + " but indexMetaData was given for " + indexMetaBlobs.keySet(); diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java index c64c8bb035a33..7d6cdef76198f 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java @@ -1909,13 +1909,8 @@ public ClusterState execute(ClusterState currentState) { meta.pendingGeneration() ); } - assert expectedGen == RepositoryData.EMPTY_REPO_GEN - || uninitializedMeta - || expectedGen == meta.generation() : "Expected non-empty generation [" - + expectedGen - + "] does not match generation tracked in [" - + meta - + "]"; + assert expectedGen == RepositoryData.EMPTY_REPO_GEN || uninitializedMeta || expectedGen == meta.generation() + : "Expected non-empty generation [" + expectedGen + "] does not match generation tracked in [" + meta + "]"; // If we run into the empty repo generation for the expected gen, the repo is assumed to have been cleared of // all contents by an external process so we reset the safe generation to the empty generation. 
final long safeGeneration = expectedGen == RepositoryData.EMPTY_REPO_GEN diff --git a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java index f2e345314ee10..4ee209111bdcb 100644 --- a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java @@ -80,13 +80,6 @@ public abstract class BaseRestHandler implements RestHandler { @Deprecated protected Logger logger = LogManager.getLogger(getClass()); - /** - * Parameter that controls whether certain REST apis should include type names in their requests or responses. - * Note: Support for this parameter will be removed after the transition period to typeless APIs. - */ - public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; - public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; - public final long getUsageCount() { return usageCount.sum(); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestNodesStatsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestNodesStatsAction.java index 972c5284b382f..3c66b0740536f 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestNodesStatsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestNodesStatsAction.java @@ -193,9 +193,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (nodesStatsRequest.indices().isSet(Flag.Search) && (request.hasParam("groups"))) { nodesStatsRequest.indices().groups(request.paramAsStringArray("groups", null)); } - if (nodesStatsRequest.indices().isSet(Flag.Indexing) && (request.hasParam("types"))) { - nodesStatsRequest.indices().types(request.paramAsStringArray("types", null)); - } if (nodesStatsRequest.indices().isSet(Flag.Segments)) { 
nodesStatsRequest.indices().includeSegmentFileSizes(request.paramAsBoolean("include_segment_file_sizes", false)); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java index 6cc72388758a2..5b628bc094c41 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; @@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestCreateIndexAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " - + "index requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -69,17 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - } - CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); if (request.hasContent()) { Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = prepareMappings(sourceAsMap); createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); } @@ -89,8 +79,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel)); } - static Map prepareMappings(Map source, boolean includeTypeName) { - if (includeTypeName || source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { + static Map prepareMappings(Map source) { + if (source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { return source; } @@ -99,12 +89,7 @@ static Map prepareMappings(Map source, boolean i @SuppressWarnings("unchecked") Map mappings = (Map) source.get("mappings"); if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } newSource.put("mappings", singletonMap(MapperService.SINGLE_MAPPING_NAME, mappings)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java index f5c5f926df36c..c35f417795377 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -60,23 +60,12 @@ import static org.opensearch.rest.RestStatus.OK; public class RestGetFieldMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "field mapping requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { - return unmodifiableList( - asList( - new Route(GET, "/_mapping/field/{fields}"), - new Route(GET, "/_mapping/{type}/field/{fields}"), - new Route(GET, "/{index}/_mapping/field/{fields}"), - new Route(GET, "/{index}/{type}/_mapping/field/{fields}"), - new Route(GET, "/{index}/_mapping/{type}/field/{fields}") - ) - ); + return unmodifiableList(asList(new Route(GET, "/_mapping/field/{fields}"), new Route(GET, "/{index}/_mapping/field/{fields}"))); } @Override @@ -87,19 +76,10 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); final String[] fields = Strings.splitStringByCommaToArray(request.param("fields")); - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName == false && types.length > 0) { - throw new IllegalArgumentException("Types cannot be specified unless include_type_name" + " is set to true."); - } - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest(); - getMappingsRequest.indices(indices).types(types).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); + getMappingsRequest.indices(indices).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); if (request.hasParam("local")) { @@ -114,12 +94,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC 
.getFieldMappings(getMappingsRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBuilder builder) throws Exception { - Map>> mappingsByIndex = response.mappings(); - - boolean isPossibleSingleFieldRequest = indices.length == 1 && types.length == 1 && fields.length == 1; - if (isPossibleSingleFieldRequest && isFieldMappingMissingField(mappingsByIndex)) { - return new BytesRestResponse(OK, builder.startObject().endObject()); - } + Map> mappingsByIndex = response.mappings(); RestStatus status = OK; if (mappingsByIndex.isEmpty() && fields.length > 0) { @@ -131,24 +106,4 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui }); } - /** - * Helper method to find out if the only included fieldmapping metadata is typed NULL, which means - * that type and index exist, but the field did not - */ - private boolean isFieldMappingMissingField(Map>> mappingsByIndex) { - if (mappingsByIndex.size() != 1) { - return false; - } - - for (Map> value : mappingsByIndex.values()) { - for (Map fieldValue : value.values()) { - for (Map.Entry fieldMappingMetadataEntry : fieldValue.entrySet()) { - if (fieldMappingMetadataEntry.getValue().isNull()) { - return true; - } - } - } - } - return false; - } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index de985cb9c372b..71e7ed098cf8d 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -36,16 +36,13 @@ import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import 
org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestStatus; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -61,13 +58,6 @@ */ public class RestGetIndexTemplateAction extends BaseRestHandler { - private static final Set RESPONSE_PARAMETERS = Collections.unmodifiableSet( - Sets.union(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER), Settings.FORMAT_PARAMS) - ); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in get index template requests is deprecated."; - @Override public List routes() { return unmodifiableList( @@ -85,9 +75,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] names = Strings.splitStringByCommaToArray(request.param("name")); final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE); - } getIndexTemplatesRequest.local(request.paramAsBoolean("local", getIndexTemplatesRequest.local())); getIndexTemplatesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexTemplatesRequest.masterNodeTimeout())); @@ -106,7 +93,7 @@ protected RestStatus getStatus(final GetIndexTemplatesResponse response) { @Override protected Set responseParams() { - return RESPONSE_PARAMETERS; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java 
b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java index 0647221c8b6a0..37c8162c6d31b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -36,18 +36,14 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; @@ -59,15 +55,6 @@ */ public class RestGetIndicesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" - + " is deprecated. 
The parameter will be removed in the next major version."; - - private static final Set allowedResponseParameters = Collections.unmodifiableSet( - Stream.concat(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER).stream(), Settings.FORMAT_PARAMS.stream()) - .collect(Collectors.toSet()) - ); - @Override public List routes() { return unmodifiableList(asList(new Route(GET, "/{index}"), new Route(HEAD, "/{index}"))); @@ -81,10 +68,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - // starting with 7.0 we don't include types by default in the response to GET requests - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER) && request.method().equals(GET)) { - deprecationLogger.deprecate("get_indices_with_types", TYPES_DEPRECATION_MESSAGE); - } final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); @@ -101,6 +84,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ @Override protected Set responseParams() { - return allowedResponseParameters; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java index 66b047ad9691c..f4f33905408e7 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,24 +32,15 @@ package org.opensearch.rest.action.admin.indices; -import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import 
org.opensearch.OpenSearchTimeoutException; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; -import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.Strings; -import org.opensearch.common.collect.ImmutableOpenMap; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.indices.TypeMissingException; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestRequest; @@ -60,25 +51,13 @@ import org.opensearch.threadpool.ThreadPool; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.SortedSet; -import java.util.stream.Collectors; import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; import static org.opensearch.rest.RestRequest.Method.GET; -import static org.opensearch.rest.RestRequest.Method.HEAD; public class RestGetMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetMappingAction.class); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" - + " mapping requests is deprecated. 
The parameter will be removed in the next major version."; private final ThreadPool threadPool; @@ -92,13 +71,8 @@ public List routes() { asList( new Route(GET, "/_mapping"), new Route(GET, "/_mappings"), - new Route(GET, "/{index}/{type}/_mapping"), new Route(GET, "/{index}/_mapping"), - new Route(GET, "/{index}/_mappings"), - new Route(GET, "/{index}/_mappings/{type}"), - new Route(GET, "/{index}/_mapping/{type}"), - new Route(HEAD, "/{index}/_mapping/{type}"), - new Route(GET, "/_mapping/{type}") + new Route(GET, "/{index}/_mappings") ) ); } @@ -111,22 +85,9 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - if (request.method().equals(HEAD)) { - deprecationLogger.deprecate("get_mapping_types_removal", "Type exists requests are deprecated, as types have been deprecated."); - } else if (includeTypeName == false && types.length > 0) { - throw new IllegalArgumentException( - "Types cannot be provided in get mapping requests, unless" + " include_type_name is set to true." 
- ); - } - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("get_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); - getMappingsRequest.indices(indices).types(types); + getMappingsRequest.indices(indices); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); final TimeValue timeout = request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout()); getMappingsRequest.masterNodeTimeout(timeout); @@ -146,59 +107,10 @@ public RestResponse buildResponse(final GetMappingsResponse response, final XCon if (threadPool.relativeTimeInMillis() - startTimeMs > timeout.millis()) { throw new OpenSearchTimeoutException("Timed out getting mappings"); } - final ImmutableOpenMap> mappingsByIndex = response - .getMappings(); - if (mappingsByIndex.isEmpty() && types.length != 0) { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); - } - - final Set typeNames = new HashSet<>(); - for (final ObjectCursor> cursor : mappingsByIndex.values()) { - for (final ObjectCursor inner : cursor.value.keys()) { - typeNames.add(inner.value); - } - } - - final SortedSet difference = Sets.sortedDifference( - Arrays.stream(types).collect(Collectors.toSet()), - typeNames - ); - - // now remove requested aliases that contain wildcards that are simple matches - final List matches = new ArrayList<>(); - outer: for (final String pattern : difference) { - if (pattern.contains("*")) { - for (final String typeName : typeNames) { - if (Regex.simpleMatch(pattern, typeName)) { - matches.add(pattern); - continue outer; - } - } - } - } - difference.removeAll(matches); - - final RestStatus status; builder.startObject(); - { - if (difference.isEmpty()) { - status = RestStatus.OK; - } else { - status = RestStatus.NOT_FOUND; - final String message = String.format( - Locale.ROOT, 
- "type" + (difference.size() == 1 ? "" : "s") + " [%s] missing", - Strings.collectionToCommaDelimitedString(difference) - ); - builder.field("error", message); - builder.field("status", status.getStatus()); - } - response.toXContent(builder, request); - } + response.toXContent(builder, request); builder.endObject(); - - return new BytesRestResponse(status, builder); + return new BytesRestResponse(RestStatus.OK, builder); } }.onResponse(getMappingsResponse))); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesStatsAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesStatsAction.java index 696bff33a73a4..eabe14a7614ac 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesStatsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestIndicesStatsAction.java @@ -102,7 +102,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC + "options changed"; indicesStatsRequest.indicesOptions(IndicesOptions.fromRequest(request, defaultIndicesOption)); indicesStatsRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); - indicesStatsRequest.types(Strings.splitStringByCommaToArray(request.param("types"))); Set metrics = Strings.tokenizeByCommaToSet(request.param("metric", "_all")); // short cut, if no metrics have been specified in URI @@ -139,10 +138,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indicesStatsRequest.groups(Strings.splitStringByCommaToArray(request.param("groups"))); } - if (request.hasParam("types")) { - indicesStatsRequest.types(Strings.splitStringByCommaToArray(request.param("types"))); - } - if (indicesStatsRequest.completion() && (request.hasParam("fields") || request.hasParam("completion_fields"))) { indicesStatsRequest.completionFields( request.paramAsStringArray("completion_fields", request.paramAsStringArray("fields", Strings.EMPTY_ARRAY)) 
diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index da89691c60c9d..f17ac495b494b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -53,11 +53,7 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutIndexTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in put index template requests is deprecated." - + " The parameter will be removed in the next major version."; @Override public List routes() { @@ -71,12 +67,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE); - } if (request.hasParam("template")) { deprecationLogger.deprecate( "put_index_template_deprecated_parameter", @@ -92,7 +83,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.cause(request.param("cause", "")); Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = 
RestCreateIndexAction.prepareMappings(sourceAsMap); putRequest.source(sourceAsMap); return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java index a9f9595df1078..f65dea1ebe3d2 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java @@ -36,7 +36,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; @@ -50,14 +49,10 @@ import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; import static org.opensearch.client.Requests.putMappingRequest; -import static org.opensearch.index.mapper.MapperService.isMappingSourceTyped; import static org.opensearch.rest.RestRequest.Method.POST; import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutMappingAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " - + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -65,20 +60,8 @@ public List routes() { asList( new Route(POST, "/{index}/_mapping/"), new Route(PUT, "/{index}/_mapping/"), - new Route(POST, "/{index}/{type}/_mapping"), - new Route(PUT, "/{index}/{type}/_mapping"), - new Route(POST, "/{index}/_mapping/{type}"), - new Route(PUT, "/{index}/_mapping/{type}"), - new Route(POST, "/_mapping/{type}"), - new Route(PUT, "/_mapping/{type}"), new Route(POST, "/{index}/_mappings/"), - new Route(PUT, "/{index}/_mappings/"), - new Route(POST, "/{index}/{type}/_mappings"), - new Route(PUT, "/{index}/{type}/_mappings"), - new Route(POST, "/{index}/_mappings/{type}"), - new Route(PUT, "/{index}/_mappings/{type}"), - new Route(POST, "/_mappings/{type}"), - new Route(PUT, "/_mappings/{type}") + new Route(PUT, "/{index}/_mappings/") ) ); } @@ -90,21 +73,12 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); - - final String type = request.param("type"); - putMappingRequest.type(includeTypeName ? type : MapperService.SINGLE_MAPPING_NAME); - Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (includeTypeName == false && (type != null || isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap))) { - throw new IllegalArgumentException( - "Types cannot be provided in put mapping requests, unless " + "the include_type_name parameter is set to true." 
- ); + + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { + throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); } putMappingRequest.source(sourceAsMap); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java index bfa34b1bea763..08b84cc6fe6cc 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -65,12 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + if (request.hasParam("include_type_name")) { deprecationLogger.deprecate("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE); } RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); - request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser)); + request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(parser)); rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java index d40712e9d2ca2..bf300d5395b79 100644 --- 
a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -39,7 +39,6 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; @@ -58,9 +57,6 @@ import static org.opensearch.rest.RestStatus.OK; public class RestValidateQueryAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestValidateQueryAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in validate query requests is deprecated."; - @Override public List routes() { return unmodifiableList( @@ -68,9 +64,7 @@ public List routes() { new Route(GET, "/_validate/query"), new Route(POST, "/_validate/query"), new Route(GET, "/{index}/_validate/query"), - new Route(POST, "/{index}/_validate/query"), - new Route(GET, "/{index}/{type}/_validate/query"), - new Route(POST, "/{index}/{type}/_validate/query") + new Route(POST, "/{index}/_validate/query") ) ); } @@ -86,11 +80,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC validateQueryRequest.indicesOptions(IndicesOptions.fromRequest(request, validateQueryRequest.indicesOptions())); validateQueryRequest.explain(request.paramAsBoolean("explain", false)); - if (request.hasParam("type")) { - deprecationLogger.deprecate("validate_query_with_types", TYPES_DEPRECATION_MESSAGE); - validateQueryRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } - validateQueryRequest.rewrite(request.paramAsBoolean("rewrite", false)); validateQueryRequest.allShards(request.paramAsBoolean("all_shards", false)); 
diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestBulkAction.java index 52f4e6bc18e2f..c140514e3c92c 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestBulkAction.java @@ -38,13 +38,10 @@ import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.Requests; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; -import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestStatusToXContentListener; -import org.opensearch.rest.action.search.RestSearchAction; import org.opensearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -57,18 +54,16 @@ /** *

        - * { "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" }
        + * { "index" : { "_index" : "test", "_id" : "1" }
          * { "type1" : { "field1" : "value1" } }
        - * { "delete" : { "_index" : "test", "_type" : "type1", "_id" : "2" } }
        - * { "create" : { "_index" : "test", "_type" : "type1", "_id" : "1" }
        + * { "delete" : { "_index" : "test", "_id" : "2" } }
        + * { "create" : { "_index" : "test", "_id" : "1" }
          * { "type1" : { "field1" : "value1" } }
          * 
        */ public class RestBulkAction extends BaseRestHandler { private final boolean allowExplicitIndex; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSearchAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in bulk requests is deprecated."; public RestBulkAction(Settings settings) { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); @@ -77,15 +72,7 @@ public RestBulkAction(Settings settings) { @Override public List routes() { return unmodifiableList( - asList( - new Route(POST, "/_bulk"), - new Route(PUT, "/_bulk"), - new Route(POST, "/{index}/_bulk"), - new Route(PUT, "/{index}/_bulk"), - // Deprecated typed endpoints. - new Route(POST, "/{index}/{type}/_bulk"), - new Route(PUT, "/{index}/{type}/_bulk") - ) + asList(new Route(POST, "/_bulk"), new Route(PUT, "/_bulk"), new Route(POST, "/{index}/_bulk"), new Route(PUT, "/{index}/_bulk")) ); } @@ -98,12 +85,6 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { BulkRequest bulkRequest = Requests.bulkRequest(); String defaultIndex = request.param("index"); - String defaultType = request.param("type"); - if (defaultType == null) { - defaultType = MapperService.SINGLE_MAPPING_NAME; - } else { - deprecationLogger.deprecate("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); - } String defaultRouting = request.param("routing"); FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); String defaultPipeline = request.param("pipeline"); @@ -117,7 +98,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC bulkRequest.add( request.requiredContent(), defaultIndex, - defaultType, defaultRouting, defaultFetchSourceContext, defaultPipeline, diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestDeleteAction.java 
b/server/src/main/java/org/opensearch/rest/action/document/RestDeleteAction.java index 25a50a49d3aa0..f9f5933a44c95 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestDeleteAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestDeleteAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.delete.DeleteRequest; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.VersionType; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -50,19 +49,10 @@ import static org.opensearch.rest.RestRequest.Method.DELETE; public class RestDeleteAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document index requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; @Override public List routes() { - return unmodifiableList( - asList( - new Route(DELETE, "/{index}/_doc/{id}"), - // Deprecated typed endpoint. 
- new Route(DELETE, "/{index}/{type}/{id}") - ) - ); + return unmodifiableList(asList(new Route(DELETE, "/{index}/_doc/{id}"))); } @Override @@ -72,14 +62,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - DeleteRequest deleteRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("delete_with_types", TYPES_DEPRECATION_MESSAGE); - deleteRequest = new DeleteRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - deleteRequest = new DeleteRequest(request.param("index"), request.param("id")); - } - + DeleteRequest deleteRequest = new DeleteRequest(request.param("index"), request.param("id")); deleteRequest.routing(request.param("routing")); deleteRequest.timeout(request.paramAsTime("timeout", DeleteRequest.DEFAULT_TIMEOUT)); deleteRequest.setRefreshPolicy(request.param("refresh")); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestGetAction.java index 18d39edf887b5..a0ec48ee55451 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestGetAction.java @@ -36,7 +36,6 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.VersionType; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -56,9 +55,6 @@ import static org.opensearch.rest.RestStatus.OK; public class RestGetAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document get 
requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; @Override public String getName() { @@ -67,27 +63,12 @@ public String getName() { @Override public List routes() { - return unmodifiableList( - asList( - new Route(GET, "/{index}/_doc/{id}"), - new Route(HEAD, "/{index}/_doc/{id}"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/{id}"), - new Route(HEAD, "/{index}/{type}/{id}") - ) - ); + return unmodifiableList(asList(new Route(GET, "/{index}/_doc/{id}"), new Route(HEAD, "/{index}/_doc/{id}"))); } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - GetRequest getRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("get_with_types", TYPES_DEPRECATION_MESSAGE); - getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - getRequest = new GetRequest(request.param("index"), request.param("id")); - } - + GetRequest getRequest = new GetRequest(request.param("index"), request.param("id")); getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh())); getRequest.routing(request.param("routing")); getRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java index cd6b3b16e79cd..801ab85039d2d 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java @@ -38,7 +38,6 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentHelper; import 
org.opensearch.rest.BaseRestHandler; @@ -64,20 +63,9 @@ */ public class RestGetSourceAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetSourceAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source" - + "requests is deprecated."; - @Override public List routes() { - return unmodifiableList( - asList( - new Route(GET, "/{index}/_source/{id}"), - new Route(HEAD, "/{index}/_source/{id}"), - new Route(GET, "/{index}/{type}/{id}/_source"), - new Route(HEAD, "/{index}/{type}/{id}/_source") - ) - ); + return unmodifiableList(asList(new Route(GET, "/{index}/_source/{id}"), new Route(HEAD, "/{index}/_source/{id}"))); } @Override @@ -87,13 +75,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final GetRequest getRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("get_source_with_types", TYPES_DEPRECATION_MESSAGE); - getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - getRequest = new GetRequest(request.param("index"), request.param("id")); - } + final GetRequest getRequest = new GetRequest(request.param("index"), request.param("id")); getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh())); getRequest.routing(request.param("routing")); getRequest.preference(request.param("preference")); @@ -140,13 +122,12 @@ public RestResponse buildResponse(final GetResponse response) throws Exception { */ private void checkResource(final GetResponse response) { final String index = response.getIndex(); - final String type = response.getType(); final String id = response.getId(); if (response.isExists() == false) { - throw new ResourceNotFoundException("Document not found [" + index + "]/[" + type + "]/[" + id + "]"); + throw new 
ResourceNotFoundException("Document not found [" + index + "]/[" + id + "]"); } else if (response.isSourceEmpty()) { - throw new ResourceNotFoundException("Source not found [" + index + "]/[" + type + "]/[" + id + "]"); + throw new ResourceNotFoundException("Source not found [" + index + "]/[" + id + "]"); } } } diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestIndexAction.java index 07e4723d35264..75f3967c32ba7 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestIndexAction.java @@ -123,9 +123,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - IndexRequest indexRequest; - final String type = request.param("type"); - indexRequest = new IndexRequest(request.param("index")); + IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.setPipeline(request.param("pipeline")); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestMultiGetAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestMultiGetAction.java index 514dc26b3e7ee..6713bddfd837d 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestMultiGetAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestMultiGetAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.get.MultiGetRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentParser; import 
org.opensearch.rest.BaseRestHandler; @@ -52,9 +51,6 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestMultiGetAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiGetAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in multi get requests is deprecated."; - private final boolean allowExplicitIndex; public RestMultiGetAction(Settings settings) { @@ -64,15 +60,7 @@ public RestMultiGetAction(Settings settings) { @Override public List routes() { return unmodifiableList( - asList( - new Route(GET, "/_mget"), - new Route(POST, "/_mget"), - new Route(GET, "/{index}/_mget"), - new Route(POST, "/{index}/_mget"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_mget"), - new Route(POST, "/{index}/{type}/_mget") - ) + asList(new Route(GET, "/_mget"), new Route(POST, "/_mget"), new Route(GET, "/{index}/_mget"), new Route(POST, "/{index}/_mget")) ); } @@ -83,10 +71,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - if (request.param("type") != null) { - deprecationLogger.deprecate("mget_with_types", TYPES_DEPRECATION_MESSAGE); - } - MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.refresh(request.paramAsBoolean("refresh", multiGetRequest.refresh())); multiGetRequest.preference(request.param("preference")); @@ -105,22 +89,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC FetchSourceContext defaultFetchSource = FetchSourceContext.parseFromRestRequest(request); try (XContentParser parser = request.contentOrSourceParamParser()) { - multiGetRequest.add( - request.param("index"), - request.param("type"), - sFields, - defaultFetchSource, - request.param("routing"), - parser, - allowExplicitIndex - ); - } - - for (MultiGetRequest.Item item 
: multiGetRequest.getItems()) { - if (item.type() != null) { - deprecationLogger.deprecate("mget_with_types", TYPES_DEPRECATION_MESSAGE); - break; - } + multiGetRequest.add(request.param("index"), sFields, defaultFetchSource, request.param("routing"), parser, allowExplicitIndex); } return channel -> client.multiGet(multiGetRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java index c2196dc84410d..66b0c004b9fb4 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestMultiTermVectorsAction.java @@ -36,8 +36,6 @@ import org.opensearch.action.termvectors.TermVectorsRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; @@ -51,8 +49,6 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestMultiTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestTermVectorsAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in multi term vector requests is deprecated."; @Override public List routes() { @@ -61,10 +57,7 @@ public List routes() { new Route(GET, "/_mtermvectors"), new Route(POST, "/_mtermvectors"), new Route(GET, "/{index}/_mtermvectors"), - new Route(POST, "/{index}/_mtermvectors"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_mtermvectors"), - new Route(POST, "/{index}/{type}/_mtermvectors") + new Route(POST, "/{index}/_mtermvectors") ) ); } @@ -79,13 +72,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC MultiTermVectorsRequest multiTermVectorsRequest = new MultiTermVectorsRequest(); TermVectorsRequest template = new TermVectorsRequest().index(request.param("index")); - if (request.hasParam("type")) { - deprecationLogger.deprecate("mtermvectors_with_types", TYPES_DEPRECATION_MESSAGE); - template.type(request.param("type")); - } else { - template.type(MapperService.SINGLE_MAPPING_NAME); - } - RestTermVectorsAction.readURIParameters(template, request); multiTermVectorsRequest.ids(Strings.commaDelimitedListToStringArray(request.param("ids"))); request.withContentOrSourceParamParserOrNull(p -> multiTermVectorsRequest.add(template, p)); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java index ad11adba6c585..727b90474082e 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestTermVectorsAction.java @@ -35,10 +35,8 @@ import org.opensearch.action.termvectors.TermVectorsRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.VersionType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestActions; @@ -59,7 +57,6 @@ * TermVectorsRequest. 
*/ public class RestTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestTermVectorsAction.class); public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in term vector requests is deprecated."; @Override @@ -69,12 +66,7 @@ public List routes() { new Route(GET, "/{index}/_termvectors"), new Route(POST, "/{index}/_termvectors"), new Route(GET, "/{index}/_termvectors/{id}"), - new Route(POST, "/{index}/_termvectors/{id}"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_termvectors"), - new Route(POST, "/{index}/{type}/_termvectors"), - new Route(GET, "/{index}/{type}/{id}/_termvectors"), - new Route(POST, "/{index}/{type}/{id}/_termvectors") + new Route(POST, "/{index}/_termvectors/{id}") ) ); } @@ -86,14 +78,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - TermVectorsRequest termVectorsRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("termvectors_with_types", TYPES_DEPRECATION_MESSAGE); - termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - termVectorsRequest = new TermVectorsRequest(request.param("index"), MapperService.SINGLE_MAPPING_NAME, request.param("id")); - } - + TermVectorsRequest termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("id")); if (request.hasContentOrSourceParam()) { try (XContentParser parser = request.contentOrSourceParamParser()) { TermVectorsRequest.parseRequest(termVectorsRequest, parser); diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestUpdateAction.java index 7afb0b6cba87c..832d8da4a8fdd 100644 --- 
a/server/src/main/java/org/opensearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestUpdateAction.java @@ -38,7 +38,6 @@ import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.update.UpdateRequest; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.VersionType; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -54,19 +53,10 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestUpdateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestUpdateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " - + "document update requests is deprecated, use the endpoint /{index}/_update/{id} instead."; @Override public List routes() { - return unmodifiableList( - asList( - new Route(POST, "/{index}/_update/{id}"), - // Deprecated typed endpoint. 
- new Route(POST, "/{index}/{type}/{id}/_update") - ) - ); + return unmodifiableList(asList(new Route(POST, "/{index}/_update/{id}"))); } @Override @@ -77,12 +67,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { UpdateRequest updateRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("update_with_types", TYPES_DEPRECATION_MESSAGE); - updateRequest = new UpdateRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - updateRequest = new UpdateRequest(request.param("index"), request.param("id")); - } + updateRequest = new UpdateRequest(request.param("index"), request.param("id")); updateRequest.routing(request.param("routing")); updateRequest.timeout(request.paramAsTime("timeout", updateRequest.timeout())); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java index 79e4e430038a1..6cb00633de441 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java @@ -37,7 +37,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.rest.BaseRestHandler; @@ -59,8 +58,6 @@ import static org.opensearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; public class RestCountAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCountAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in count requests is deprecated."; 
@Override public List routes() { @@ -69,10 +66,7 @@ public List routes() { new Route(GET, "/_count"), new Route(POST, "/_count"), new Route(GET, "/{index}/_count"), - new Route(POST, "/{index}/_count"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_count"), - new Route(POST, "/{index}/{type}/_count") + new Route(POST, "/{index}/_count") ) ); } @@ -104,11 +98,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC searchSourceBuilder.minScore(minScore); } - if (request.hasParam("type")) { - deprecationLogger.deprecate("count_with_types", TYPES_DEPRECATION_MESSAGE); - countRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } - countRequest.preference(request.param("preference")); final int terminateAfter = request.paramAsInt("terminate_after", DEFAULT_TERMINATE_AFTER); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestExplainAction.java index 46841e599bfda..2b73e145cf5ca 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestExplainAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.explain.ExplainRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.query.QueryBuilder; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -55,20 +54,10 @@ * Rest action for computing a score explanation for specific documents. 
*/ public class RestExplainAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestExplainAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying a type in explain requests is deprecated."; @Override public List routes() { - return unmodifiableList( - asList( - new Route(GET, "/{index}/_explain/{id}"), - new Route(POST, "/{index}/_explain/{id}"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/{id}/_explain"), - new Route(POST, "/{index}/{type}/{id}/_explain") - ) - ); + return unmodifiableList(asList(new Route(GET, "/{index}/_explain/{id}"), new Route(POST, "/{index}/_explain/{id}"))); } @Override @@ -78,14 +67,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - ExplainRequest explainRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecate("explain_with_types", TYPES_DEPRECATION_MESSAGE); - explainRequest = new ExplainRequest(request.param("index"), request.param("type"), request.param("id")); - } else { - explainRequest = new ExplainRequest(request.param("index"), request.param("id")); - } - + ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("id")); explainRequest.parent(request.param("parent")); explainRequest.routing(request.param("routing")); explainRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java index de433530d0629..8262f8199a704 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestMultiSearchAction.java @@ -68,7 +68,6 @@ public class RestMultiSearchAction extends BaseRestHandler { 
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in multi search requests is deprecated."; private static final Set RESPONSE_PARAMS; @@ -92,10 +91,7 @@ public List routes() { new Route(GET, "/_msearch"), new Route(POST, "/_msearch"), new Route(GET, "/{index}/_msearch"), - new Route(POST, "/{index}/_msearch"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_msearch"), - new Route(POST, "/{index}/{type}/_msearch") + new Route(POST, "/{index}/_msearch") ) ); } @@ -108,13 +104,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final MultiSearchRequest multiSearchRequest = parseRequest(request, client.getNamedWriteableRegistry(), allowExplicitIndex); - // Emit a single deprecation message if any search request contains types. 
- for (SearchRequest searchRequest : multiSearchRequest.requests()) { - if (searchRequest.types().length > 0) { - deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE); - break; - } - } return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); cancellableClient.execute(MultiSearchAction.INSTANCE, multiSearchRequest, new RestToXContentListener<>(channel)); @@ -192,7 +181,6 @@ public static void parseMultiLineRequest( ) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - String[] types = Strings.splitStringByCommaToArray(request.param("type")); String searchType = request.param("search_type"); boolean ccsMinimizeRoundtrips = request.paramAsBoolean("ccs_minimize_roundtrips", true); String routing = request.param("routing"); @@ -206,7 +194,6 @@ public static void parseMultiLineRequest( consumer, indices, indicesOptions, - types, routing, searchType, ccsMinimizeRoundtrips, diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java index 54504685d4bfb..b84200407462f 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java @@ -42,7 +42,6 @@ import org.opensearch.common.Booleans; import org.opensearch.common.Strings; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.rest.BaseRestHandler; @@ -89,9 +88,6 @@ public class RestSearchAction extends BaseRestHandler { RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } - private static final DeprecationLogger deprecationLogger = 
DeprecationLogger.getLogger(RestSearchAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in search requests is deprecated."; - @Override public String getName() { return "search_action"; @@ -104,10 +100,7 @@ public List routes() { new Route(GET, "/_search"), new Route(POST, "/_search"), new Route(GET, "/{index}/_search"), - new Route(POST, "/{index}/_search"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_search"), - new Route(POST, "/{index}/{type}/_search") + new Route(POST, "/{index}/_search") ) ); } @@ -199,10 +192,6 @@ public static void parseSearchRequest( searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll"))); } - if (request.hasParam("type")) { - deprecationLogger.deprecate("search_with_types", TYPES_DEPRECATION_MESSAGE); - searchRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); diff --git a/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java b/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java index fb7dd7ded501b..6ea3a322449e5 100644 --- a/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java +++ b/server/src/main/java/org/opensearch/script/ExplainableScoreScript.java @@ -33,6 +33,7 @@ package org.opensearch.script; import org.apache.lucene.search.Explanation; +import org.opensearch.common.Nullable; import java.io.IOException; @@ -49,7 +50,21 @@ public interface ExplainableScoreScript { * want to explain how that was computed. 
* * @param subQueryScore the Explanation for _score + * @deprecated please use {@code explain(Explanation subQueryScore, @Nullable String scriptName)} */ + @Deprecated Explanation explain(Explanation subQueryScore) throws IOException; + /** + * Build the explanation of the current document being scored + * The script score needs the Explanation of the sub query score because it might use _score and + * want to explain how that was computed. + * + * @param subQueryScore the Explanation for _score + * @param scriptName the script name + */ + default Explanation explain(Explanation subQueryScore, @Nullable String scriptName) throws IOException { + return explain(subQueryScore); + } + } diff --git a/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java index 7e85c5cdd725d..1f187f7f0e8f5 100644 --- a/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/opensearch/script/ScriptedMetricAggContexts.java @@ -126,7 +126,7 @@ public Map getState() { return state; } - // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for + // Return the doc as a map (instead of LeafDocLookup) in order to abide by type allowlisting rules for // Painless scripts. public Map> getDoc() { return leafLookup == null ? 
null : leafLookup.doc(); diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index 9ee1e6c15a2e7..a641f2e625e16 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -55,7 +55,6 @@ import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ObjectMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.query.AbstractQueryBuilder; import org.opensearch.index.query.ParsedQuery; import org.opensearch.index.query.QueryBuilder; @@ -177,7 +176,8 @@ final class DefaultSearchContext extends SearchContext { TimeValue timeout, FetchPhase fetchPhase, boolean lowLevelCancellation, - Version minNodeVersion + Version minNodeVersion, + boolean validate ) throws IOException { this.readerContext = readerContext; this.request = request; @@ -207,9 +207,9 @@ final class DefaultSearchContext extends SearchContext { request.shardId().id(), this.searcher, request::nowInMillis, - shardTarget.getClusterAlias() + shardTarget.getClusterAlias(), + validate ); - queryShardContext.setTypes(request.types()); queryBoost = request.indexBoost(); this.lowLevelCancellation = lowLevelCancellation; } @@ -321,11 +321,6 @@ public void preProcess(boolean rewrite) { @Override public Query buildFilteredQuery(Query query) { List filters = new ArrayList<>(); - Query typeFilter = createTypeFilter(queryShardContext.getTypes()); - if (typeFilter != null) { - filters.add(typeFilter); - } - if (mapperService().hasNested() && new NestedHelper(mapperService()).mightMatchNestedDocs(query) && (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) { @@ -357,17 +352,6 @@ && new NestedHelper(mapperService()).mightMatchNestedDocs(query) } } - private Query 
createTypeFilter(String[] types) { - if (types != null && types.length >= 1) { - if (mapperService().documentMapper() == null) { - return null; - } - TypeFieldMapper.TypeFieldType ft = new TypeFieldMapper.TypeFieldType(mapperService().documentMapper().type()); - return ft.typeFilter(types); - } - return null; - } - @Override public ShardSearchContextId id() { return readerContext.id(); diff --git a/server/src/main/java/org/opensearch/search/SearchHit.java b/server/src/main/java/org/opensearch/search/SearchHit.java index 80ed3268780c3..daae83e408530 100644 --- a/server/src/main/java/org/opensearch/search/SearchHit.java +++ b/server/src/main/java/org/opensearch/search/SearchHit.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.Explanation; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import org.opensearch.action.OriginalIndices; import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; @@ -98,7 +99,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable documentFields, Map metaFields) { - this(docId, id, type, null, documentFields, metaFields); + public SearchHit(int docId, String id, Map documentFields, Map metaFields) { + this(docId, id, null, documentFields, metaFields); } public SearchHit( int nestedTopDocId, String id, - Text type, NestedIdentity nestedIdentity, Map documentFields, Map metaFields @@ -155,7 +154,6 @@ public SearchHit( } else { this.id = null; } - this.type = type; this.nestedIdentity = nestedIdentity; this.documentFields = documentFields == null ? emptyMap() : documentFields; this.metaFields = metaFields == null ? 
emptyMap() : metaFields; @@ -165,7 +163,9 @@ public SearchHit(StreamInput in) throws IOException { docId = -1; score = in.readFloat(); id = in.readOptionalText(); - type = in.readOptionalText(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readOptionalText(); + } nestedIdentity = in.readOptionalWriteable(NestedIdentity::new); version = in.readLong(); seqNo = in.readZLong(); @@ -261,11 +261,15 @@ private void writeFields(StreamOutput out, Map fields) th } } + private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); + @Override public void writeTo(StreamOutput out) throws IOException { out.writeFloat(score); out.writeOptionalText(id); - out.writeOptionalText(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalText(SINGLE_MAPPING_TYPE); + } out.writeOptionalWriteable(nestedIdentity); out.writeLong(version); out.writeZLong(seqNo); @@ -376,17 +380,6 @@ public String getId() { return id != null ? id.string() : null; } - /** - * The type of the document. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String getType() { - return type != null ? type.string() : null; - } - /** * If this is a nested hit then nested reference information is returned otherwise null is returned. 
*/ @@ -597,7 +590,6 @@ public void setInnerHits(Map innerHits) { public static class Fields { static final String _INDEX = "_index"; - static final String _TYPE = "_type"; static final String _ID = "_id"; static final String _VERSION = "_version"; static final String _SEQ_NO = "_seq_no"; @@ -641,9 +633,6 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t if (index != null) { builder.field(Fields._INDEX, RemoteClusterAware.buildRemoteIndexName(clusterAlias, index)); } - if (type != null) { - builder.field(Fields._TYPE, type); - } if (id != null) { builder.field(Fields._ID, id); } @@ -762,7 +751,6 @@ public static SearchHit fromXContent(XContentParser parser) { } public static void declareInnerHitsParseFields(ObjectParser, Void> parser) { - parser.declareString((map, value) -> map.put(Fields._TYPE, new Text(value)), new ParseField(Fields._TYPE)); parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX)); parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID)); parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE)); @@ -822,12 +810,11 @@ public static void declareInnerHitsParseFields(ObjectParser, public static SearchHit createFromMap(Map values) { String id = get(Fields._ID, values, null); - Text type = get(Fields._TYPE, values, null); NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null); Map metaFields = get(METADATA_FIELDS, values, Collections.emptyMap()); Map documentFields = get(DOCUMENT_FIELDS, values, Collections.emptyMap()); - SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, documentFields, metaFields); + SearchHit searchHit = new SearchHit(-1, id, nestedIdentity, documentFields, metaFields); String index = get(Fields._INDEX, values, null); String clusterAlias = null; if (index != null) { @@ -972,7 +959,6 @@ public boolean equals(Object obj) { } SearchHit other = 
(SearchHit) obj; return Objects.equals(id, other.id) - && Objects.equals(type, other.type) && Objects.equals(nestedIdentity, other.nestedIdentity) && Objects.equals(version, other.version) && Objects.equals(seqNo, other.seqNo) diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index cdc2509bbcb00..c052f7f89e14e 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -89,7 +89,6 @@ import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.query.TermsSetQueryBuilder; -import org.opensearch.index.query.TypeQueryBuilder; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.index.query.WrapperQueryBuilder; import org.opensearch.index.query.functionscore.ExponentialDecayFunctionBuilder; @@ -1183,7 +1182,6 @@ private void registerQueryParsers(List plugins) { registerQuery( new QuerySpec<>(SimpleQueryStringBuilder.NAME, SimpleQueryStringBuilder::new, SimpleQueryStringBuilder::fromXContent) ); - registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ScriptQueryBuilder.NAME, ScriptQueryBuilder::new, ScriptQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoDistanceQueryBuilder.NAME, GeoDistanceQueryBuilder::new, GeoDistanceQueryBuilder::fromXContent)); registerQuery( diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 7dc993f4f1cd9..de4586efd60b1 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -448,8 +448,8 @@ public void executeQueryPhase( SearchShardTask task, ActionListener listener ) { - assert 
request.canReturnNullResponseIfMatchNoDocs() == false - || request.numberOfShards() > 1 : "empty responses require more than one shard"; + assert request.canReturnNullResponseIfMatchNoDocs() == false || request.numberOfShards() > 1 + : "empty responses require more than one shard"; final IndexShard shard = getShard(request); rewriteAndFetchShardRequest(shard, request, new ActionListener() { @Override @@ -816,7 +816,7 @@ final SearchContext createContext( SearchShardTask task, boolean includeAggregations ) throws IOException { - final DefaultSearchContext context = createSearchContext(readerContext, request, defaultSearchTimeout); + final DefaultSearchContext context = createSearchContext(readerContext, request, defaultSearchTimeout, false); try { if (request.scroll() != null) { context.scrollContext().scroll = request.scroll(); @@ -842,19 +842,27 @@ final SearchContext createContext( return context; } - public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { + public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout, boolean validate) throws IOException { final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); final IndexShard indexShard = indexService.getShard(request.shardId().getId()); final Engine.SearcherSupplier reader = indexShard.acquireSearcherSupplier(); final ShardSearchContextId id = new ShardSearchContextId(sessionId, idGenerator.incrementAndGet()); try (ReaderContext readerContext = new ReaderContext(id, indexService, indexShard, reader, -1L, true)) { - DefaultSearchContext searchContext = createSearchContext(readerContext, request, timeout); + DefaultSearchContext searchContext = createSearchContext(readerContext, request, timeout, validate); searchContext.addReleasable(readerContext.markAsUsed(0L)); return searchContext; } } - private DefaultSearchContext createSearchContext(ReaderContext reader, 
ShardSearchRequest request, TimeValue timeout) + public DefaultSearchContext createValidationContext(ShardSearchRequest request, TimeValue timeout) throws IOException { + return createSearchContext(request, timeout, true); + } + + public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { + return createSearchContext(request, timeout, false); + } + + private DefaultSearchContext createSearchContext(ReaderContext reader, ShardSearchRequest request, TimeValue timeout, boolean validate) throws IOException { boolean success = false; DefaultSearchContext searchContext = null; @@ -875,7 +883,8 @@ private DefaultSearchContext createSearchContext(ReaderContext reader, ShardSear timeout, fetchPhase, lowLevelCancellation, - clusterService.state().nodes().getMinNodeVersion() + clusterService.state().nodes().getMinNodeVersion(), + validate ); // we clone the query shard context here just for rewriting otherwise we // might end up with incorrect state since we are using now() or script services @@ -1349,6 +1358,13 @@ public QueryRewriteContext getRewriteContext(LongSupplier nowInMillis) { return indicesService.getRewriteContext(nowInMillis); } + /** + * Returns a new {@link QueryRewriteContext} for query validation with the given {@code now} provider + */ + public QueryRewriteContext getValidationRewriteContext(LongSupplier nowInMillis) { + return indicesService.getValidationRewriteContext(nowInMillis); + } + public IndicesService getIndicesService() { return indicesService; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/opensearch/search/aggregations/InternalAggregation.java index 2c2421a249549..7c403bcb9dbbf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/opensearch/search/aggregations/InternalAggregation.java @@ -226,7 +226,8 @@ public final void writeTo(StreamOutput 
out) throws IOException { out.writeString(name); out.writeGenericValue(metadata); if (out.getVersion().before(LegacyESVersion.V_7_8_0)) { - assert pipelineAggregatorsForBwcSerialization != null : "serializing to pre-7.8.0 versions should have called mergePipelineTreeForBWCSerialization"; + assert pipelineAggregatorsForBwcSerialization != null + : "serializing to pre-7.8.0 versions should have called mergePipelineTreeForBWCSerialization"; out.writeNamedWriteableList(pipelineAggregatorsForBwcSerialization); } doWriteTo(out); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java index 6d047197b38a4..9ac43c812e41c 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java @@ -81,7 +81,7 @@ public abstract class CompositeValuesSourceBuilder order.writeTo(output), missingOrders); } out.writeList(buckets); diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 094c706053703..ac0baf18dfb55 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -163,7 +163,7 @@ public static > ConstructingO this.valuesField = clone.valuesField; } - AbstractPercentilesAggregationBuilder(StreamInput in) throws IOException { + AbstractPercentilesAggregationBuilder(StreamInput in, ParseField valuesField) throws IOException { super(in); values = in.readDoubleArray(); keyed = in.readBoolean(); @@ 
-175,6 +175,7 @@ public static > ConstructingO PercentilesMethod method = PercentilesMethod.readFromStream(in); percentilesConfig = PercentilesConfig.fromLegacy(method, compression, numberOfSignificantValueDigits); } + this.valuesField = valuesField; } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index d8f3c80f36ff3..037830c63ecdf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -82,7 +82,7 @@ private PercentileRanksAggregationBuilder(String name, double[] values, Percenti } public PercentileRanksAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, VALUES_FIELD); } private PercentileRanksAggregationBuilder( diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index 85e8d37d3fdba..323723bb6deb1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -80,7 +80,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { } public PercentilesAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, PERCENTS_FIELD); } public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java b/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java index 
a9f3d06c0ea90..e3a57654f94cf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java +++ b/server/src/main/java/org/opensearch/search/aggregations/pipeline/MovingFunctions.java @@ -192,7 +192,7 @@ public static double holt(double[] values, double alpha, double beta) { } /** - * Version of holt that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * Version of holt that can "forecast", not exposed as an allowlisted function for moving_fn scripts, but * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. */ public static double[] holtForecast(double[] values, double alpha, double beta, int numForecasts) { @@ -268,7 +268,7 @@ public static double holtWinters(double[] values, double alpha, double beta, dou } /** - * Version of holt-winters that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * Version of holt-winters that can "forecast", not exposed as an allowlisted function for moving_fn scripts, but * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. */ public static double[] holtWintersForecast( diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/AggregationPath.java b/server/src/main/java/org/opensearch/search/aggregations/support/AggregationPath.java index 8731bb3afea5a..98cd9e66d9e7e 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/AggregationPath.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/AggregationPath.java @@ -228,8 +228,8 @@ public Aggregator resolveTopmostAggregator(Aggregator root) { AggregationPath.PathElement token = pathElements.get(0); // TODO both unwrap and subAggregator are only used here! 
Aggregator aggregator = ProfilingAggregator.unwrap(root.subAggregator(token.name)); - assert (aggregator instanceof SingleBucketAggregator) - || (aggregator instanceof NumericMetricsAggregator) : "this should be picked up before aggregation execution - on validate"; + assert (aggregator instanceof SingleBucketAggregator) || (aggregator instanceof NumericMetricsAggregator) + : "this should be picked up before aggregation execution - on validate"; return aggregator; } diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index e1537b6aa1934..a74497477099a 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -60,7 +60,6 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ObjectMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.Uid; import org.opensearch.search.SearchContextSourcePrinter; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; @@ -330,19 +329,19 @@ private HitContext prepareNonNestedHitContext( Text typeText = documentMapper.typeText(); if (fieldsVisitor == null) { - SearchHit hit = new SearchHit(docId, null, typeText, null, null); + SearchHit hit = new SearchHit(docId, null, null, null); return new HitContext(hit, subReaderContext, subDocId, lookup.source()); } else { SearchHit hit; loadStoredFields(context.mapperService(), fieldReader, fieldsVisitor, subDocId); - Uid uid = fieldsVisitor.uid(); + String id = fieldsVisitor.id(); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); Map metaFields = new HashMap<>(); fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields); - hit = new SearchHit(docId, uid.id(), typeText, docFields, metaFields); + hit = new SearchHit(docId, id, docFields, 
metaFields); } else { - hit = new SearchHit(docId, uid.id(), typeText, emptyMap(), emptyMap()); + hit = new SearchHit(docId, id, emptyMap(), emptyMap()); } HitContext hitContext = new HitContext(hit, subReaderContext, subDocId, lookup.source()); @@ -375,7 +374,7 @@ private HitContext prepareNestedHitContext( // because the entire _source is only stored with the root document. boolean needSource = sourceRequired(context) || context.highlight() != null; - Uid rootId; + String rootId; Map rootSourceAsMap = null; XContentType rootSourceContentType = null; @@ -383,7 +382,7 @@ private HitContext prepareNestedHitContext( if (context instanceof InnerHitsContext.InnerHitSubContext) { InnerHitsContext.InnerHitSubContext innerHitsContext = (InnerHitsContext.InnerHitSubContext) context; - rootId = innerHitsContext.getRootId(); + rootId = innerHitsContext.getId(); if (needSource) { SourceLookup rootLookup = innerHitsContext.getRootLookup(); @@ -394,7 +393,7 @@ private HitContext prepareNestedHitContext( FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); loadStoredFields(context.mapperService(), storedFieldReader, rootFieldsVisitor, rootDocId); rootFieldsVisitor.postProcess(context.mapperService()); - rootId = rootFieldsVisitor.uid(); + rootId = rootFieldsVisitor.id(); if (needSource) { if (rootFieldsVisitor.source() != null) { @@ -420,7 +419,6 @@ private HitContext prepareNestedHitContext( } DocumentMapper documentMapper = context.mapperService().documentMapper(); - Text typeText = documentMapper.typeText(); ObjectMapper nestedObjectMapper = documentMapper.findNestedObjectMapper(nestedDocId, context, subReaderContext); assert nestedObjectMapper != null; @@ -432,7 +430,7 @@ private HitContext prepareNestedHitContext( nestedObjectMapper ); - SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), typeText, nestedIdentity, docFields, metaFields); + SearchHit hit = new SearchHit(nestedTopDocId, rootId, nestedIdentity, docFields, metaFields); HitContext hitContext = 
new HitContext(hit, subReaderContext, nestedDocId, new SourceLookup()); // Use a clean, fresh SourceLookup // for the nested context diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java index 792fc61db8ed3..d7ac7d21f1922 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java @@ -45,7 +45,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; -import org.opensearch.index.mapper.Uid; import org.opensearch.search.SearchHit; import org.opensearch.search.internal.SearchContext; import org.opensearch.search.internal.SubSearchContext; @@ -98,8 +97,7 @@ public abstract static class InnerHitSubContext extends SubSearchContext { private InnerHitsContext childInnerHits; private Weight innerHitQueryWeight; - // TODO: when types are complete removed just use String instead for the id: - private Uid rootId; + private String id; private SourceLookup rootLookup; protected InnerHitSubContext(String name, SearchContext context) { @@ -141,12 +139,12 @@ public SearchContext parentSearchContext() { * * Since this ID is available on the context, inner hits can avoid re-loading the root _id. 
*/ - public Uid getRootId() { - return rootId; + public String getId() { + return id; } - public void setRootId(Uid rootId) { - this.rootId = rootId; + public void setId(String id) { + this.id = id; } /** diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java index d9986fc90c9ce..56020d334a446 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java @@ -36,7 +36,6 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; -import org.opensearch.index.mapper.Uid; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.fetch.FetchContext; @@ -95,7 +94,7 @@ private void hitExecute(Map innerHi docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc; } innerHitsContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); - innerHitsContext.setRootId(new Uid(hit.getType(), hit.getId())); + innerHitsContext.setId(hit.getId()); innerHitsContext.setRootLookup(rootLookup); fetchPhase.execute(innerHitsContext); diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighter.java index 8171e02eca466..d2699c650d887 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -116,7 +116,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 
1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); + Analyzer analyzer = context.mapperService().documentMapper().mappers().indexAnalyzer(); final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset(); textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context.getQueryShardContext(), hitContext, fieldContext.forceSource); diff --git a/server/src/main/java/org/opensearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/opensearch/search/internal/ShardSearchRequest.java index 42690bc88c9e2..f41f7fae8b786 100644 --- a/server/src/main/java/org/opensearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/opensearch/search/internal/ShardSearchRequest.java @@ -71,6 +71,7 @@ import org.opensearch.transport.TransportRequest; import java.io.IOException; +import java.util.Arrays; import java.util.Map; import java.util.function.Function; @@ -87,7 +88,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque private final int numberOfShards; private final SearchType searchType; private final Scroll scroll; - private final String[] types; private final float indexBoost; private final Boolean requestCache; private final long nowInMillis; @@ -152,7 +152,6 @@ public ShardSearchRequest( numberOfShards, searchRequest.searchType(), searchRequest.source(), - searchRequest.types(), searchRequest.requestCache(), aliasFilter, indexBoost, @@ -170,14 +169,13 @@ public ShardSearchRequest( assert searchRequest.allowPartialSearchResults() != null; } - public ShardSearchRequest(ShardId shardId, String[] types, long nowInMillis, AliasFilter aliasFilter) { + public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) { this( OriginalIndices.NONE, shardId, -1, SearchType.QUERY_THEN_FETCH, null, - 
types, null, aliasFilter, 1.0f, @@ -198,7 +196,6 @@ private ShardSearchRequest( int numberOfShards, SearchType searchType, SearchSourceBuilder source, - String[] types, Boolean requestCache, AliasFilter aliasFilter, float indexBoost, @@ -215,7 +212,6 @@ private ShardSearchRequest( this.numberOfShards = numberOfShards; this.searchType = searchType; this.source = source; - this.types = types; this.requestCache = requestCache; this.aliasFilter = aliasFilter; this.indexBoost = indexBoost; @@ -240,7 +236,13 @@ public ShardSearchRequest(StreamInput in) throws IOException { numberOfShards = in.readVInt(); scroll = in.readOptionalWriteable(Scroll::new); source = in.readOptionalWriteable(SearchSourceBuilder::new); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + // types no longer relevant so ignore + String[] types = in.readStringArray(); + if (types.length > 0) { + throw new IllegalStateException("types are no longer supported in ids query but found [" + Arrays.toString(types) + "]"); + } + } aliasFilter = new AliasFilter(in); indexBoost = in.readFloat(); nowInMillis = in.readVLong(); @@ -281,7 +283,6 @@ public ShardSearchRequest(ShardSearchRequest clone) { this.numberOfShards = clone.numberOfShards; this.scroll = clone.scroll; this.source = clone.source; - this.types = clone.types; this.aliasFilter = clone.aliasFilter; this.indexBoost = clone.indexBoost; this.nowInMillis = clone.nowInMillis; @@ -314,7 +315,10 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce } out.writeOptionalWriteable(scroll); out.writeOptionalWriteable(source); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + // types not supported so send an empty array to previous versions + out.writeStringArray(Strings.EMPTY_ARRAY); + } aliasFilter.writeTo(out); out.writeFloat(indexBoost); if (asKey == false) { @@ -363,10 +367,6 @@ public ShardId shardId() { return shardId; } - public String[] types() { - return 
types; - } - public SearchSourceBuilder source() { return source; } diff --git a/server/src/main/java/org/opensearch/search/lookup/DocLookup.java b/server/src/main/java/org/opensearch/search/lookup/DocLookup.java index be85a59a5db53..13e86c235db57 100644 --- a/server/src/main/java/org/opensearch/search/lookup/DocLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/DocLookup.java @@ -32,7 +32,6 @@ package org.opensearch.search.lookup; import org.apache.lucene.index.LeafReaderContext; -import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; @@ -43,13 +42,10 @@ public class DocLookup { private final MapperService mapperService; private final Function> fieldDataLookup; - @Nullable - private final String[] types; - DocLookup(MapperService mapperService, Function> fieldDataLookup, @Nullable String[] types) { + DocLookup(MapperService mapperService, Function> fieldDataLookup) { this.mapperService = mapperService; this.fieldDataLookup = fieldDataLookup; - this.types = types; } public MapperService mapperService() { @@ -61,10 +57,6 @@ public IndexFieldData getForField(MappedFieldType fieldType) { } public LeafDocLookup getLeafDocLookup(LeafReaderContext context) { - return new LeafDocLookup(mapperService, fieldDataLookup, types, context); - } - - public String[] getTypes() { - return types; + return new LeafDocLookup(mapperService, fieldDataLookup, context); } } diff --git a/server/src/main/java/org/opensearch/search/lookup/FieldsLookup.java b/server/src/main/java/org/opensearch/search/lookup/FieldsLookup.java index 9fd8268ad4b67..9af22c65aba28 100644 --- a/server/src/main/java/org/opensearch/search/lookup/FieldsLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/FieldsLookup.java @@ -32,22 +32,18 @@ package org.opensearch.search.lookup; import org.apache.lucene.index.LeafReaderContext; -import 
org.opensearch.common.Nullable; import org.opensearch.index.mapper.MapperService; public class FieldsLookup { private final MapperService mapperService; - @Nullable - private final String[] types; - FieldsLookup(MapperService mapperService, @Nullable String[] types) { + FieldsLookup(MapperService mapperService) { this.mapperService = mapperService; - this.types = types; } public LeafFieldsLookup getLeafFieldsLookup(LeafReaderContext context) { - return new LeafFieldsLookup(mapperService, types, context.reader()); + return new LeafFieldsLookup(mapperService, context.reader()); } } diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java index 4174ac2c55058..82daa94d92146 100644 --- a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java @@ -33,7 +33,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.opensearch.ExceptionsHelper; -import org.opensearch.common.Nullable; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.ScriptDocValues; @@ -43,7 +42,6 @@ import java.io.IOException; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -60,23 +58,13 @@ public class LeafDocLookup implements Map> { private final MapperService mapperService; private final Function> fieldDataLookup; - - @Nullable - private final String[] types; - private final LeafReaderContext reader; private int docId = -1; - LeafDocLookup( - MapperService mapperService, - Function> fieldDataLookup, - @Nullable String[] types, - LeafReaderContext reader - ) { + LeafDocLookup(MapperService mapperService, Function> fieldDataLookup, LeafReaderContext reader) { this.mapperService = 
mapperService; this.fieldDataLookup = fieldDataLookup; - this.types = types; this.reader = reader; } @@ -100,9 +88,7 @@ public ScriptDocValues get(Object key) { if (scriptValues == null) { final MappedFieldType fieldType = mapperService.fieldType(fieldName); if (fieldType == null) { - throw new IllegalArgumentException( - "No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types) - ); + throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping"); } // load fielddata on behalf of the script: otherwise it would need additional permissions // to deal with pagedbytes/ramusagestimator/etc diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java index 6b56a5bf14acf..14c5dade52c87 100644 --- a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java @@ -33,7 +33,6 @@ import org.apache.lucene.index.LeafReader; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.Nullable; import org.opensearch.index.fieldvisitor.SingleFieldsVisitor; import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -42,7 +41,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -54,19 +52,14 @@ public class LeafFieldsLookup implements Map { private final MapperService mapperService; - - @Nullable - private final String[] types; - private final LeafReader reader; private int docId = -1; private final Map cachedFieldData = new HashMap<>(); - LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) { + LeafFieldsLookup(MapperService mapperService, LeafReader reader) { this.mapperService = mapperService; - this.types = types; this.reader 
= reader; } @@ -148,7 +141,7 @@ private FieldLookup loadFieldData(String name) { if (data == null) { MappedFieldType fieldType = mapperService.fieldType(name); if (fieldType == null) { - throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types)); + throw new IllegalArgumentException("No field found for [" + name + "] in mapping"); } data = new FieldLookup(fieldType); cachedFieldData.put(name, data); diff --git a/server/src/main/java/org/opensearch/search/lookup/SearchLookup.java b/server/src/main/java/org/opensearch/search/lookup/SearchLookup.java index 269052f895066..11feb3f2f9e57 100644 --- a/server/src/main/java/org/opensearch/search/lookup/SearchLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/SearchLookup.java @@ -33,7 +33,6 @@ package org.opensearch.search.lookup; import org.apache.lucene.index.LeafReaderContext; -import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; @@ -75,17 +74,15 @@ public class SearchLookup { */ public SearchLookup( MapperService mapperService, - BiFunction, IndexFieldData> fieldDataLookup, - @Nullable String[] types + BiFunction, IndexFieldData> fieldDataLookup ) { this.fieldChain = Collections.emptySet(); docMap = new DocLookup( mapperService, - fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())), - types + fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())) ); sourceLookup = new SourceLookup(); - fieldsLookup = new FieldsLookup(mapperService, types); + fieldsLookup = new FieldsLookup(mapperService); this.fieldDataLookup = fieldDataLookup; } @@ -100,8 +97,7 @@ private SearchLookup(SearchLookup searchLookup, Set fieldChain) { this.fieldChain = Collections.unmodifiableSet(fieldChain); this.docMap = new DocLookup( 
searchLookup.docMap.mapperService(), - fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())), - searchLookup.docMap.getTypes() + fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())) ); this.sourceLookup = searchLookup.sourceLookup; this.fieldsLookup = searchLookup.fieldsLookup; diff --git a/server/src/main/java/org/opensearch/snapshots/InFlightShardSnapshotStates.java b/server/src/main/java/org/opensearch/snapshots/InFlightShardSnapshotStates.java index 81749b274bd96..469b57562284f 100644 --- a/server/src/main/java/org/opensearch/snapshots/InFlightShardSnapshotStates.java +++ b/server/src/main/java/org/opensearch/snapshots/InFlightShardSnapshotStates.java @@ -96,8 +96,8 @@ private static void addStateInformation( busyIds.computeIfAbsent(indexName, k -> new HashSet<>()).add(shardId); assert assertGenerationConsistency(generations, indexName, shardId, shardState.generation()); } else if (shardState.state() == SnapshotsInProgress.ShardState.SUCCESS) { - assert busyIds.getOrDefault(indexName, Collections.emptySet()) - .contains(shardId) == false : "Can't have a successful operation queued after an in-progress operation"; + assert busyIds.getOrDefault(indexName, Collections.emptySet()).contains(shardId) == false + : "Can't have a successful operation queued after an in-progress operation"; generations.computeIfAbsent(indexName, k -> new HashMap<>()).put(shardId, shardState.generation()); } } diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java index 1c357ca79202f..b6c0b63efe3d3 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java @@ -50,6 +50,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.collect.ImmutableOpenMap; 
import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.core.internal.io.IOUtils; @@ -274,9 +275,8 @@ private void startNewShards(SnapshotsInProgress.Entry entry, Map() { @@ -369,25 +369,25 @@ private void snapshot( } final Repository repository = repositoriesService.repository(snapshot.getRepository()); - Engine.IndexCommitRef snapshotRef = null; + GatedCloseable wrappedSnapshot = null; try { // we flush first to make sure we get the latest writes snapshotted - snapshotRef = indexShard.acquireLastIndexCommit(true); - final IndexCommit snapshotIndexCommit = snapshotRef.getIndexCommit(); + wrappedSnapshot = indexShard.acquireLastIndexCommit(true); + final IndexCommit snapshotIndexCommit = wrappedSnapshot.get(); repository.snapshotShard( indexShard.store(), indexShard.mapperService(), snapshot.getSnapshotId(), indexId, - snapshotRef.getIndexCommit(), + wrappedSnapshot.get(), getShardStateId(indexShard, snapshotIndexCommit), snapshotStatus, version, userMetadata, - ActionListener.runBefore(listener, snapshotRef::close) + ActionListener.runBefore(listener, wrappedSnapshot::close) ); } catch (Exception e) { - IOUtils.close(snapshotRef); + IOUtils.close(wrappedSnapshot); throw e; } } catch (Exception e) { diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java index 3ab5cd64ff11f..122c13337fa70 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java @@ -1924,8 +1924,8 @@ private void runReadyDeletions(RepositoryData repositoryData, String repository) @Override public ClusterState execute(ClusterState currentState) { - assert readyDeletions(currentState) - .v1() == currentState : "Deletes should 
have been set to ready by finished snapshot deletes and finalizations"; + assert readyDeletions(currentState).v1() == currentState + : "Deletes should have been set to ready by finished snapshot deletes and finalizations"; for (SnapshotDeletionsInProgress.Entry entry : currentState.custom( SnapshotDeletionsInProgress.TYPE, SnapshotDeletionsInProgress.EMPTY @@ -2667,8 +2667,8 @@ private void deleteSnapshotsFromRepository(SnapshotDeletionsInProgress.Entry del repositoriesService.getRepositoryData(deleteEntry.repository(), new ActionListener() { @Override public void onResponse(RepositoryData repositoryData) { - assert repositoryData - .getGenId() == expectedRepoGen : "Repository generation should not change as long as a ready delete is found in the cluster state but found [" + assert repositoryData.getGenId() == expectedRepoGen + : "Repository generation should not change as long as a ready delete is found in the cluster state but found [" + expectedRepoGen + "] in cluster state and [" + repositoryData.getGenId() @@ -2746,9 +2746,8 @@ protected SnapshotDeletionsInProgress filterDeletions(SnapshotDeletionsInProgres @Override protected void handleListeners(List> deleteListeners) { - assert repositoryData.getSnapshotIds() - .stream() - .noneMatch(deleteEntry.getSnapshots()::contains) : "Repository data contained snapshot ids " + assert repositoryData.getSnapshotIds().stream().noneMatch(deleteEntry.getSnapshots()::contains) + : "Repository data contained snapshot ids " + repositoryData.getSnapshotIds() + " that should should been deleted by [" + deleteEntry @@ -2866,12 +2865,8 @@ public final void clusterStateProcessed(String source, ClusterState oldState, Cl } } else { leaveRepoLoop(deleteEntry.repository()); - assert readyDeletions.stream() - .noneMatch(entry -> entry.repository().equals(deleteEntry.repository())) : "New finalizations " - + newFinalizations - + " added even though deletes " - + readyDeletions - + " are ready"; + assert 
readyDeletions.stream().noneMatch(entry -> entry.repository().equals(deleteEntry.repository())) + : "New finalizations " + newFinalizations + " added even though deletes " + readyDeletions + " are ready"; for (SnapshotsInProgress.Entry entry : newFinalizations) { endSnapshot(entry, newState.metadata(), repositoryData); } @@ -3837,8 +3832,8 @@ synchronized boolean assertNotQueued(Snapshot snapshot) { synchronized boolean assertConsistent() { assert (latestKnownMetaData == null && snapshotsToFinalize.isEmpty()) - || (latestKnownMetaData != null - && snapshotsToFinalize.isEmpty() == false) : "Should not hold on to metadata if there are no more queued snapshots"; + || (latestKnownMetaData != null && snapshotsToFinalize.isEmpty() == false) + : "Should not hold on to metadata if there are no more queued snapshots"; assert snapshotsToFinalize.values().stream().noneMatch(Collection::isEmpty) : "Found empty queue in " + snapshotsToFinalize; return true; } diff --git a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java index 8767ce95d3352..e22793e057c6a 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java @@ -80,13 +80,11 @@ public class TaskResultsService { public static final String TASK_INDEX = ".tasks"; - public static final String TASK_TYPE = "task"; - public static final String TASK_RESULT_INDEX_MAPPING_FILE = "task-index-mapping.json"; public static final String TASK_RESULT_MAPPING_VERSION_META_FIELD = "version"; - public static final int TASK_RESULT_MAPPING_VERSION = 3; + public static final int TASK_RESULT_MAPPING_VERSION = 3; // must match version in task-index-mapping.json /** * The backoff policy to use when saving a task result fails. 
The total wait @@ -115,7 +113,7 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.settings(taskResultIndexSettings()); createIndexRequest.index(TASK_INDEX); - createIndexRequest.mapping(TASK_TYPE, taskResultIndexMapping(), XContentType.JSON); + createIndexRequest.mapping(taskResultIndexMapping()); createIndexRequest.cause("auto(task api)"); client.admin().indices().create(createIndexRequest, new ActionListener() { @@ -146,7 +144,6 @@ public void onFailure(Exception e) { client.admin() .indices() .preparePutMapping(TASK_INDEX) - .setType(TASK_TYPE) .setSource(taskResultIndexMapping(), XContentType.JSON) .execute(ActionListener.delegateFailure(listener, (l, r) -> doStoreResult(taskResult, listener))); } else { @@ -156,7 +153,7 @@ public void onFailure(Exception e) { } private int getTaskResultMappingVersion(IndexMetadata metadata) { - MappingMetadata mappingMetadata = metadata.getMappings().get(TASK_TYPE); + MappingMetadata mappingMetadata = metadata.mapping(); if (mappingMetadata == null) { return 0; } @@ -169,7 +166,7 @@ private int getTaskResultMappingVersion(IndexMetadata metadata) { } private void doStoreResult(TaskResult taskResult, ActionListener listener) { - IndexRequestBuilder index = client.prepareIndex(TASK_INDEX, TASK_TYPE, taskResult.getTask().getTaskId().toString()); + IndexRequestBuilder index = client.prepareIndex(TASK_INDEX).setId(taskResult.getTask().getTaskId().toString()); try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { taskResult.toXContent(builder, ToXContent.EMPTY_PARAMS); index.setSource(builder); diff --git a/server/src/main/java/org/opensearch/transport/Transports.java b/server/src/main/java/org/opensearch/transport/Transports.java index bce428221fb75..7d9f013db5338 100644 --- a/server/src/main/java/org/opensearch/transport/Transports.java +++ 
b/server/src/main/java/org/opensearch/transport/Transports.java @@ -77,11 +77,8 @@ public static boolean assertNotTransportThread(String reason) { public static boolean assertDefaultThreadContext(ThreadContext threadContext) { assert threadContext.getRequestHeadersOnly().isEmpty() - || threadContext.getRequestHeadersOnly().size() == 1 - && threadContext.getRequestHeadersOnly().containsKey(Task.X_OPAQUE_ID) : "expected empty context but was " - + threadContext.getRequestHeadersOnly() - + " on " - + Thread.currentThread().getName(); + || threadContext.getRequestHeadersOnly().size() == 1 && threadContext.getRequestHeadersOnly().containsKey(Task.X_OPAQUE_ID) + : "expected empty context but was " + threadContext.getRequestHeadersOnly() + " on " + Thread.currentThread().getName(); return true; } } diff --git a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json index 76b07bf3570f2..54e9d39902f03 100644 --- a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json +++ b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json @@ -1,5 +1,5 @@ { - "task" : { + "_doc" : { "_meta": { "version": 3 }, diff --git a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java index a1455a715e461..b5859e1fb18a9 100644 --- a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java @@ -465,11 +465,10 @@ public void testSearchPhaseExecutionException() throws IOException { } public void testRoutingMissingException() throws IOException { - RoutingMissingException ex = serialize(new RoutingMissingException("idx", "type", "id")); + RoutingMissingException ex = serialize(new RoutingMissingException("idx", "id")); assertEquals("idx", ex.getIndex().getName()); - assertEquals("type", ex.getType()); 
assertEquals("id", ex.getId()); - assertEquals("routing is required for [idx]/[type]/[id]", ex.getMessage()); + assertEquals("routing is required for [idx]/[id]", ex.getMessage()); } public void testRepositoryException() throws IOException { diff --git a/server/src/test/java/org/opensearch/OpenSearchExceptionTests.java b/server/src/test/java/org/opensearch/OpenSearchExceptionTests.java index 3d0b334622cd5..9f32af143ee2d 100644 --- a/server/src/test/java/org/opensearch/OpenSearchExceptionTests.java +++ b/server/src/test/java/org/opensearch/OpenSearchExceptionTests.java @@ -566,7 +566,7 @@ public void testFromXContent() throws IOException { public void testFromXContentWithCause() throws IOException { OpenSearchException e = new OpenSearchException( "foo", - new OpenSearchException("bar", new OpenSearchException("baz", new RoutingMissingException("_test", "_type", "_id"))) + new OpenSearchException("bar", new OpenSearchException("baz", new RoutingMissingException("_test", "_id"))) ); final XContent xContent = randomFrom(XContentType.values()).xContent(); @@ -594,7 +594,7 @@ public void testFromXContentWithCause() throws IOException { cause = (OpenSearchException) cause.getCause(); assertEquals( cause.getMessage(), - "OpenSearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]" + "OpenSearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_id]]" ); assertThat(cause.getHeaderKeys(), hasSize(0)); assertThat(cause.getMetadataKeys(), hasSize(2)); @@ -603,7 +603,7 @@ public void testFromXContentWithCause() throws IOException { } public void testFromXContentWithHeadersAndMetadata() throws IOException { - RoutingMissingException routing = new RoutingMissingException("_test", "_type", "_id"); + RoutingMissingException routing = new RoutingMissingException("_test", "_id"); OpenSearchException baz = new OpenSearchException("baz", routing); baz.addHeader("baz_0", "baz0"); 
baz.addMetadata("opensearch.baz_1", "baz1"); @@ -656,7 +656,7 @@ public void testFromXContentWithHeadersAndMetadata() throws IOException { cause = (OpenSearchException) cause.getCause(); assertEquals( cause.getMessage(), - "OpenSearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]" + "OpenSearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_id]]" ); assertThat(cause.getHeaderKeys(), hasSize(0)); assertThat(cause.getMetadataKeys(), hasSize(2)); @@ -878,11 +878,11 @@ public void testFailureToAndFromXContentWithDetails() throws IOException { break; case 4: // JDK exception with cause - failureCause = new RoutingMissingException("idx", "type", "id"); + failureCause = new RoutingMissingException("idx", "id"); failure = new RuntimeException("E", failureCause); expectedCause = new OpenSearchException( - "OpenSearch exception [type=routing_missing_exception, " + "reason=routing is required for [idx]/[type]/[id]]" + "OpenSearch exception [type=routing_missing_exception, " + "reason=routing is required for [idx]/[id]]" ); expectedCause.addMetadata("opensearch.index", "idx"); expectedCause.addMetadata("opensearch.index_uuid", "_na_"); diff --git a/server/src/test/java/org/opensearch/action/DocWriteResponseTests.java b/server/src/test/java/org/opensearch/action/DocWriteResponseTests.java index e80d5b1c70bd1..30dcaf8d9c1c1 100644 --- a/server/src/test/java/org/opensearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/opensearch/action/DocWriteResponseTests.java @@ -39,6 +39,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; @@ -53,7 +54,6 @@ public class 
DocWriteResponseTests extends OpenSearchTestCase { public void testGetLocation() { final DocWriteResponse response = new DocWriteResponse( new ShardId("index", "uuid", 0), - "type", "id", SequenceNumbers.UNASSIGNED_SEQ_NO, 17, @@ -61,14 +61,13 @@ public void testGetLocation() { Result.CREATED ) { }; - assertEquals("/index/type/id", response.getLocation(null)); - assertEquals("/index/type/id?routing=test_routing", response.getLocation("test_routing")); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/id", response.getLocation(null)); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/id?routing=test_routing", response.getLocation("test_routing")); } public void testGetLocationNonAscii() { final DocWriteResponse response = new DocWriteResponse( new ShardId("index", "uuid", 0), - "type", "❤", SequenceNumbers.UNASSIGNED_SEQ_NO, 17, @@ -76,14 +75,13 @@ public void testGetLocationNonAscii() { Result.CREATED ) { }; - assertEquals("/index/type/%E2%9D%A4", response.getLocation(null)); - assertEquals("/index/type/%E2%9D%A4?routing=%C3%A4", response.getLocation("ä")); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/%E2%9D%A4", response.getLocation(null)); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/%E2%9D%A4?routing=%C3%A4", response.getLocation("ä")); } public void testGetLocationWithSpaces() { final DocWriteResponse response = new DocWriteResponse( new ShardId("index", "uuid", 0), - "type", "a b", SequenceNumbers.UNASSIGNED_SEQ_NO, 17, @@ -91,8 +89,8 @@ public void testGetLocationWithSpaces() { Result.CREATED ) { }; - assertEquals("/index/type/a+b", response.getLocation(null)); - assertEquals("/index/type/a+b?routing=c+d", response.getLocation("c d")); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/a+b", response.getLocation(null)); + assertEquals("/index/" + MapperService.SINGLE_MAPPING_NAME + "/a+b?routing=c+d", response.getLocation("c d")); } /** @@ -102,7 +100,6 @@ public void 
testGetLocationWithSpaces() { public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOException { DocWriteResponse response = new DocWriteResponse( new ShardId("index", "uuid", 0), - "type", "id", SequenceNumbers.UNASSIGNED_SEQ_NO, 17, diff --git a/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java b/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java index 2b6c5284546cf..726f77b0ddf13 100644 --- a/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java +++ b/server/src/test/java/org/opensearch/action/ShardValidateQueryRequestTests.java @@ -69,17 +69,15 @@ public void testSerialize() throws IOException { validateQueryRequest.query(QueryBuilders.termQuery("field", "value")); validateQueryRequest.rewrite(true); validateQueryRequest.explain(false); - validateQueryRequest.types("type1", "type2"); ShardValidateQueryRequest request = new ShardValidateQueryRequest( new ShardId("index", "foobar", 1), - new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] { "alias0", "alias1" }), + new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1"), validateQueryRequest ); request.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { ShardValidateQueryRequest readRequest = new ShardValidateQueryRequest(in); assertEquals(request.filteringAliases(), readRequest.filteringAliases()); - assertArrayEquals(request.types(), readRequest.types()); assertEquals(request.explain(), readRequest.explain()); assertEquals(request.query(), readRequest.query()); assertEquals(request.rewrite(), readRequest.rewrite()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index cfe3e9779314f..320db79428300 100644 --- 
a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -35,11 +35,8 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentBuilder; @@ -47,24 +44,23 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.RandomCreateIndexGenerator; +import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import java.util.Map; import java.util.Set; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.CoreMatchers.equalTo; public class CreateIndexRequestTests extends OpenSearchTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - request.mapping("my_type", mapping, XContentType.JSON); + String mapping = Strings.toString( + JsonXContent.contentBuilder().startObject().startObject(MapperService.SINGLE_MAPPING_NAME).endObject().endObject() + ); + 
request.mapping(mapping); try (BytesStreamOutput output = new BytesStreamOutput()) { request.writeTo(output); @@ -72,7 +68,7 @@ public void testSerialization() throws IOException { try (StreamInput in = output.bytes().streamInput()) { CreateIndexRequest serialized = new CreateIndexRequest(in); assertEquals(request.index(), serialized.index()); - assertEquals(mapping, serialized.mappings().get("my_type")); + assertEquals("{\"_doc\":{}}", serialized.mappings()); } } } @@ -101,36 +97,6 @@ public void testTopLevelKeys() { assertEquals("unknown key [FOO_SHOULD_BE_ILLEGAL_HERE] for create index", e.getMessage()); } - public void testToXContent() throws IOException { - CreateIndexRequest request = new CreateIndexRequest("foo"); - - String mapping; - if (randomBoolean()) { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - } else { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().endObject()); - } - request.mapping("my_type", mapping, XContentType.JSON); - - Alias alias = new Alias("test_alias"); - alias.routing("1"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.writeIndex(true); - request.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_SHARDS, 10); - request.settings(settings); - - String actualRequestBody = Strings.toString(request); - - String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," - + "\"mappings\":{\"my_type\":{\"my_type\":{}}}," - + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; - - assertEquals(expectedRequestBody, actualRequestBody); - } - public void testMappingKeyedByType() throws IOException { CreateIndexRequest request1 = new CreateIndexRequest("foo"); CreateIndexRequest request2 = new CreateIndexRequest("bar"); @@ -171,48 +137,6 @@ public void testMappingKeyedByType() throws IOException { 
request2.mapping("type1", builder); assertEquals(request1.mappings(), request2.mappings()); } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - String nakedMapping = "{\"properties\": {\"foo\": {\"type\": \"integer\"}}}"; - request1.mapping("type2", nakedMapping, XContentType.JSON); - request2.mapping("type2", "{\"type2\": " + nakedMapping + "}", XContentType.JSON); - assertEquals(request1.mappings(), request2.mappings()); - } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - Map nakedMapping = MapBuilder.newMapBuilder() - .put( - "properties", - MapBuilder.newMapBuilder() - .put("bar", MapBuilder.newMapBuilder().put("type", "scaled_float").put("scaling_factor", 100).map()) - .map() - ) - .map(); - request1.mapping("type3", nakedMapping); - request2.mapping("type3", MapBuilder.newMapBuilder().put("type3", nakedMapping).map()); - assertEquals(request1.mappings(), request2.mappings()); - } - } - - public void testToAndFromXContent() throws IOException { - - final CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); - - boolean humanReadable = randomBoolean(); - final XContentType xContentType = randomFrom(XContentType.values()); - BytesReference originalBytes = toShuffledXContent(createIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - - CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(); - parsedCreateIndexRequest.source(originalBytes, xContentType); - - assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - - BytesReference finalBytes = toShuffledXContent(parsedCreateIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, 
xContentType); } public void testSettingsType() throws IOException { diff --git a/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java index 7b03a4bc7bc64..f712b93b409dc 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/get/GetIndexResponseTests.java @@ -41,26 +41,16 @@ import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.RandomCreateIndexGenerator; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; -import java.util.function.Predicate; -public class GetIndexResponseTests extends AbstractSerializingTestCase { - - @Override - protected GetIndexResponse doParseInstance(XContentParser parser) throws IOException { - return GetIndexResponse.fromXContent(parser); - } +public class GetIndexResponseTests extends AbstractWireSerializingTestCase { @Override protected Writeable.Reader instanceReader() { @@ -70,7 +60,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetIndexResponse createTestInstance() { String[] indices = generateRandomStringArray(5, 5, false, false); - ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder settings = 
ImmutableOpenMap.builder(); ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); @@ -78,9 +68,7 @@ protected GetIndexResponse createTestInstance() { IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; boolean includeDefaults = randomBoolean(); for (String index : indices) { - // rarely have no types - int typeCount = rarely() ? 0 : 1; - mappings.put(index, GetMappingsResponseTests.createMappingsForIndex(typeCount, true)); + mappings.put(index, GetMappingsResponseTests.createMappingsForIndex()); List aliasMetadataList = new ArrayList<>(); int aliasesNum = randomIntBetween(0, 3); @@ -111,19 +99,4 @@ protected GetIndexResponse createTestInstance() { dataStreams.build() ); } - - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // we do not want to add new fields at the root (index-level), or inside the blocks - return f -> f.equals("") || f.contains(".settings") || f.contains(".defaults") || f.contains(".mappings") || f.contains(".aliases"); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser still expects them. - * The new typeless parsing is implemented in the client side GetIndexResponse. 
- */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); - } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 2c1ed98bbaeac..512e21cc28469 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -33,87 +33,42 @@ package org.opensearch.action.admin.indices.mapping.get; import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; +import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.function.Predicate; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.hamcrest.CoreMatchers.equalTo; - -public class GetFieldMappingsResponseTests extends AbstractSerializingTestCase { +public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { - Map>> mappings = new HashMap<>(); + Map> mappings 
= new HashMap<>(); FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}")); - mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetadata))); + mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata)); GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); try (BytesStreamOutput out = new BytesStreamOutput()) { response.writeTo(out); try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) { GetFieldMappingsResponse serialized = new GetFieldMappingsResponse(in); - FieldMappingMetadata metadata = serialized.fieldMappings("index", "type", "field"); + FieldMappingMetadata metadata = serialized.fieldMappings("index", "field"); assertNotNull(metadata); assertEquals(new BytesArray("{}"), metadata.getSource()); } } } - public void testManualJunkedJson() throws Exception { - // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values - final String json = "{\"index1\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - // junk here - + "\"junk1\": [\"field1\", {\"field2\":{}}]," - + "\"junk2\": [{\"field3\":{}}]," - + "\"junk3\": 42," - + "\"junk4\": \"Q\"," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," - + "\"index0\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; - 
- final XContentParser parser = XContentType.JSON.xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, json.getBytes("UTF-8")); - - final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); - - FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{\"type\":\"keyword\"}")); - Map fieldMapping = new HashMap<>(); - fieldMapping.put("field0", fieldMappingMetadata); - fieldMapping.put("field1", fieldMappingMetadata); - - Map> typeMapping = new HashMap<>(); - typeMapping.put("doctype0", fieldMapping); - typeMapping.put("doctype1", fieldMapping); - - Map>> mappings = new HashMap<>(); - mappings.put("index0", typeMapping); - mappings.put("index1", typeMapping); - - final Map>> responseMappings = response.mappings(); - assertThat(responseMappings, equalTo(mappings)); - } - - @Override - protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetFieldMappingsResponse.fromXContent(parser); + public void testNullFieldMappingToXContent() { + Map> mappings = new HashMap<>(); + mappings.put("index", Collections.emptyMap()); + GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); + assertEquals("{\"index\":{\"mappings\":{}}}", Strings.toString(response)); } @Override @@ -126,41 +81,18 @@ protected Writeable.Reader instanceReader() { return GetFieldMappingsResponse::new; } - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // allow random fields at the level of `index` and `index.mappings.doctype.field` - // otherwise random field could be evaluated as index name or type name - return s -> false == (s.matches("(?[^.]+)") - || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser - * still expects them. 
The new typeless parsing is implemented in the client side GetFieldMappingsResponse. - */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); - } - - private Map>> randomMapping() { - Map>> mappings = new HashMap<>(); + private Map> randomMapping() { + Map> mappings = new HashMap<>(); int indices = randomInt(10); for (int i = 0; i < indices; i++) { - final Map> doctypesMappings = new HashMap<>(); - int doctypes = randomInt(10); - for (int j = 0; j < doctypes; j++) { - Map fieldMappings = new HashMap<>(); - int fields = randomInt(10); - for (int k = 0; k < fields; k++) { - final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; - FieldMappingMetadata metadata = new FieldMappingMetadata("my field", new BytesArray(mapping)); - fieldMappings.put("field" + k, metadata); - } - doctypesMappings.put("doctype" + j, fieldMappings); + Map fieldMappings = new HashMap<>(); + int fields = randomInt(10); + for (int k = 0; k < fields; k++) { + final String mapping = randomBoolean() ? 
"{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; + FieldMappingMetadata metaData = new FieldMappingMetadata("my field", new BytesArray(mapping)); + fieldMappings.put("field" + k, metaData); } - mappings.put("index" + i, doctypesMappings); } return mappings; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index 56dfbeffc21ac..5dd05789429bf 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -32,65 +32,35 @@ package org.opensearch.action.admin.indices.mapping.get; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.ToXContent.Params; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import org.opensearch.test.EqualsHashCodeTestUtils; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Objects; -public class GetMappingsResponseTests extends AbstractSerializingTestCase { - - @Override - protected boolean supportsUnknownFields() { - return false; - } +public class GetMappingsResponseTests extends AbstractWireSerializingTestCase { public void testCheckEqualsAndHashCode() { GetMappingsResponse resp = createTestInstance(); 
EqualsHashCodeTestUtils.checkEqualsAndHashCode(resp, r -> new GetMappingsResponse(r.mappings()), GetMappingsResponseTests::mutate); } - @Override - protected GetMappingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetMappingsResponse.fromXContent(parser); - } - @Override protected Writeable.Reader instanceReader() { return GetMappingsResponse::new; } - private static GetMappingsResponse mutate(GetMappingsResponse original) throws IOException { - ImmutableOpenMap.Builder> builder = ImmutableOpenMap.builder(original.mappings()); + private static GetMappingsResponse mutate(GetMappingsResponse original) { + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(original.mappings()); String indexKey = original.mappings().keys().iterator().next().value; + builder.put(indexKey + "1", createMappingsForIndex()); - ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(original.mappings().get(indexKey)); - final String typeKey; - Iterator> iter = original.mappings().get(indexKey).keys().iterator(); - if (iter.hasNext()) { - typeKey = iter.next().value; - } else { - typeKey = "new-type"; - } - - typeBuilder.put(typeKey, new MappingMetadata("type-" + randomAlphaOfLength(6), randomFieldMapping())); - - builder.put(indexKey, typeBuilder.build()); return new GetMappingsResponse(builder.build()); } @@ -99,48 +69,23 @@ protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throw return mutate(instance); } - public static ImmutableOpenMap createMappingsForIndex(int typeCount, boolean randomTypeName) { - List typeMappings = new ArrayList<>(typeCount); - - for (int i = 0; i < typeCount; i++) { - if (rarely() == false) { // rarely have no fields - Map mappings = new HashMap<>(); - mappings.put("field-" + i, randomFieldMapping()); - if (randomBoolean()) { - mappings.put("field2-" + i, randomFieldMapping()); - } - - try { - String typeName = MapperService.SINGLE_MAPPING_NAME; - if (randomTypeName) { - typeName = "type-" + 
randomAlphaOfLength(5); - } - MappingMetadata mmd = new MappingMetadata(typeName, mappings); - typeMappings.add(mmd); - } catch (IOException e) { - fail("shouldn't have failed " + e); - } + public static MappingMetadata createMappingsForIndex() { + Map mappings = new HashMap<>(); + if (rarely() == false) { // rarely have no fields + mappings.put("field", randomFieldMapping()); + if (randomBoolean()) { + mappings.put("field2", randomFieldMapping()); } + String typeName = MapperService.SINGLE_MAPPING_NAME; + return new MappingMetadata(typeName, mappings); } - ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(); - typeMappings.forEach(mmd -> typeBuilder.put(mmd.type(), mmd)); - return typeBuilder.build(); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser - * still expects them. The new typeless parsing is implemented in the client side GetMappingsResponse. - */ - @Override - protected Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); + return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings); } @Override protected GetMappingsResponse createTestInstance() { - ImmutableOpenMap.Builder> indexBuilder = ImmutableOpenMap.builder(); - int typeCount = rarely() ? 
0 : 1; - indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex(typeCount, randomBoolean())); + ImmutableOpenMap.Builder indexBuilder = ImmutableOpenMap.builder(); + indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex()); GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build()); logger.debug("--> created: {}", resp); return resp; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index b45e7d1225017..fd6fc3b6839d7 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -66,13 +66,9 @@ public class PutMappingRequestTests extends OpenSearchTestCase { public void testValidation() { - PutMappingRequest r = new PutMappingRequest("myindex").type(""); + PutMappingRequest r = new PutMappingRequest("myindex"); ActionRequestValidationException ex = r.validate(); - assertNotNull("type validation should fail", ex); - assertTrue(ex.getMessage().contains("type is empty")); - r.type("mytype"); - ex = r.validate(); assertNotNull("source validation should fail", ex); assertTrue(ex.getMessage().contains("source is missing")); @@ -96,21 +92,20 @@ public void testValidation() { } /** - * Test that {@link PutMappingRequest#buildFromSimplifiedDef(String, Object...)} + * Test that {@link PutMappingRequest#buildFromSimplifiedDef(Object...)} * rejects inputs where the {@code Object...} varargs of field name and properties are not * paired correctly */ public void testBuildFromSimplifiedDef() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field") + () -> PutMappingRequest.buildFromSimplifiedDef("only_field") ); 
assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); } public void testToXContent() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); - request.type("my_type"); XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); mapping.startObject("properties"); @@ -128,7 +123,6 @@ public void testToXContent() throws IOException { public void testToXContentWithEmptySource() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); - request.type("my_type"); String actualRequestBody = Strings.toString(request); String expectedRequestBody = "{}"; @@ -166,10 +160,7 @@ private static PutMappingRequest createTestItem() throws IOException { String index = randomAlphaOfLength(5); PutMappingRequest request = new PutMappingRequest(index); - - String type = randomAlphaOfLength(5); - request.type(type); - request.source(RandomCreateIndexGenerator.randomMapping(type)); + request.source(RandomCreateIndexGenerator.randomMapping("_doc")); return request; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 8e5c3d9f59a86..0fcc60e2a4087 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexRequestTests; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -54,7 +53,6 @@ import org.opensearch.indices.IndicesModule; import org.opensearch.test.OpenSearchTestCase; import 
org.opensearch.test.XContentTestUtils; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import org.junit.Before; @@ -64,7 +62,7 @@ import java.util.Map; import java.util.function.Consumer; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class RolloverRequestTests extends OpenSearchTestCase { @@ -87,7 +85,7 @@ public void testConditionsParsing() throws Exception { .field("max_size", "45gb") .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(3)); MaxAgeCondition maxAgeCondition = (MaxAgeCondition) conditions.get(MaxAgeCondition.NAME); @@ -107,7 +105,6 @@ public void testParsingWithIndexSettings() throws Exception { .field("max_docs", 100) .endObject() .startObject("mappings") - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "string") @@ -115,7 +112,6 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject() - .endObject() .startObject("settings") .field("number_of_shards", 10) .endObject() @@ -124,10 +120,10 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject(); - request.fromXContent(true, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); - assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); + assertThat(request.getCreateIndexRequest().mappings(), containsString("not_analyzed")); assertThat(request.getCreateIndexRequest().aliases().size(), equalTo(1)); assertThat(request.getCreateIndexRequest().settings().getAsInt("number_of_shards", 0), equalTo(10)); } @@ -145,11 +141,10 @@ public void 
testTypelessMappingParsing() throws Exception { .endObject() .endObject(); - boolean includeTypeName = false; - request.fromXContent(includeTypeName, createParser(builder)); + request.fromXContent(createParser(builder)); CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); - String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME); + String mapping = createIndexRequest.mappings(); assertNotNull(mapping); Map parsedMapping = XContentHelper.convertToMap(new BytesArray(mapping), false, XContentType.JSON).v2(); @@ -182,27 +177,6 @@ public void testSerialize() throws Exception { } } - public void testToAndFromXContent() throws IOException { - RolloverRequest rolloverRequest = createTestItem(); - - final XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(rolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - - RolloverRequest parsedRolloverRequest = new RolloverRequest(); - parsedRolloverRequest.fromXContent(true, createParser(xContentType.xContent(), originalBytes)); - - CreateIndexRequest createIndexRequest = rolloverRequest.getCreateIndexRequest(); - CreateIndexRequest parsedCreateIndexRequest = parsedRolloverRequest.getCreateIndexRequest(); - CreateIndexRequestTests.assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - CreateIndexRequestTests.assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - assertEquals(rolloverRequest.getConditions(), parsedRolloverRequest.getConditions()); - - BytesReference finalBytes = toShuffledXContent(parsedRolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, xContentType); - } - public void testUnknownFields() throws IOException { final 
RolloverRequest request = new RolloverRequest(); XContentType xContentType = randomFrom(XContentType.values()); @@ -215,7 +189,7 @@ public void testUnknownFields() throws IOException { } builder.endObject(); BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); - expectThrows(XContentParseException.class, () -> request.fromXContent(false, createParser(xContentType.xContent(), mutated))); + expectThrows(XContentParseException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } public void testSameConditionCanOnlyBeAddedOnce() { @@ -244,8 +218,8 @@ public void testValidation() { private static RolloverRequest createTestItem() throws IOException { RolloverRequest rolloverRequest = new RolloverRequest(); if (randomBoolean()) { - String type = randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type)); + rolloverRequest.getCreateIndexRequest() + .mapping(MapperService.SINGLE_MAPPING_NAME, RandomCreateIndexGenerator.randomMapping(MapperService.SINGLE_MAPPING_NAME)); } if (randomBoolean()) { RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index 7a47f2575ae6a..ca3b1f3f3815d 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -65,7 +65,7 @@ public void setupIndex() { int numDocs = scaledRandomIntBetween(100, 1000); for (int j = 0; j < numDocs; ++j) { String id = Integer.toString(j); - client().prepareIndex("test", "type1", id).setSource("text", "sometext").get(); + 
client().prepareIndex("test").setId(id).setSource("text", "sometext").get(); } client().admin().indices().prepareFlush("test").get(); client().admin().indices().prepareRefresh().get(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java index 0cf9f9fe152d6..6a84c5894fceb 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java @@ -92,7 +92,7 @@ public void testSegmentStats() throws Exception { .setSettings(Settings.builder().put("index.store.type", storeType.getSettingsKey())) ); ensureGreen("test"); - client().prepareIndex("test", "doc", "1").setSource("foo", "bar", "bar", "baz", "baz", 42).get(); + client().prepareIndex("test").setId("1").setSource("foo", "bar", "bar", "baz", "baz", 42).get(); client().admin().indices().prepareRefresh("test").get(); IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get(); @@ -101,7 +101,7 @@ public void testSegmentStats() throws Exception { assertThat(stats.getCount(), greaterThan(0L)); // now check multiple segments stats are merged together - client().prepareIndex("test", "doc", "2").setSource("foo", "bar", "bar", "baz", "baz", 43).get(); + client().prepareIndex("test").setId("2").setSource("foo", "bar", "bar", "baz", "baz", 43).get(); client().admin().indices().prepareRefresh("test").get(); rsp = client().admin().indices().prepareStats("test").get(); @@ -129,7 +129,8 @@ public void testRefreshListeners() throws Exception { createIndex("test", Settings.builder().put("refresh_interval", -1).build()); // Index a document asynchronously so the request will only return when document is refreshed - ActionFuture index = client().prepareIndex("test", "test", "test") + ActionFuture index = client().prepareIndex("test") + .setId("test") 
.setSource("test", "test") .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .execute(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java index 6f30781ab9bbe..7f62861d4f332 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java @@ -32,31 +32,23 @@ package org.opensearch.action.admin.indices.template.get; -import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.IndexTemplateMetadata; +import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.test.AbstractXContentTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.Matchers.equalTo; -public class GetIndexTemplatesResponseTests extends AbstractXContentTestCase { - @Override - protected GetIndexTemplatesResponse doParseInstance(XContentParser parser) throws IOException { - return GetIndexTemplatesResponse.fromXContent(parser); - } +public class GetIndexTemplatesResponseTests extends AbstractWireSerializingTestCase { @Override protected GetIndexTemplatesResponse createTestInstance() { @@ -80,7 +72,7 @@ protected 
GetIndexTemplatesResponse createTestInstance() { } if (randomBoolean()) { try { - templateBuilder.putMapping("doc", "{\"doc\":{\"properties\":{\"type\":\"text\"}}}"); + templateBuilder.putMapping("doc", "{\"properties\":{\"type\":\"text\"}}"); } catch (IOException ex) { throw new UncheckedIOException(ex); } @@ -91,20 +83,8 @@ protected GetIndexTemplatesResponse createTestInstance() { } @Override - protected boolean supportsUnknownFields() { - // We can not inject anything at the top level because a GetIndexTemplatesResponse is serialized as a map - // from template name to template content. IndexTemplateMetadataTests already covers situations where we - // inject arbitrary things inside the IndexTemplateMetadata. - return false; - } - - /** - * For now, we only unit test the legacy typed responses. This will soon no longer be the case, - * as we introduce support for typeless xContent parsing in {@link GetFieldMappingsResponse}. - */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); + protected Writeable.Reader instanceReader() { + return GetIndexTemplatesResponse::new; } @Override diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkItemResponseTests.java index 4d33b389c314c..808872fee6f96 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkItemResponseTests.java @@ -57,7 +57,7 @@ public class BulkItemResponseTests extends OpenSearchTestCase { public void testFailureToString() { - Failure failure = new Failure("index", "type", "id", new RuntimeException("test")); + Failure failure = new Failure("index", "id", new RuntimeException("test")); String toString = failure.toString(); assertThat(toString, containsString("\"type\":\"runtime_exception\"")); assertThat(toString, 
containsString("\"reason\":\"test\"")); @@ -101,16 +101,15 @@ public void testFailureToAndFromXContent() throws IOException { int itemId = randomIntBetween(0, 100); String index = randomAlphaOfLength(5); - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); final Tuple exceptions = randomExceptions(); Exception bulkItemCause = (Exception) exceptions.v1(); - Failure bulkItemFailure = new Failure(index, type, id, bulkItemCause); + Failure bulkItemFailure = new Failure(index, id, bulkItemCause); BulkItemResponse bulkItemResponse = new BulkItemResponse(itemId, opType, bulkItemFailure); - Failure expectedBulkItemFailure = new Failure(index, type, id, exceptions.v2(), ExceptionsHelper.status(bulkItemCause)); + Failure expectedBulkItemFailure = new Failure(index, id, exceptions.v2(), ExceptionsHelper.status(bulkItemCause)); BulkItemResponse expectedBulkItemResponse = new BulkItemResponse(itemId, opType, expectedBulkItemFailure); BytesReference originalBytes = toShuffledXContent(bulkItemResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); @@ -133,7 +132,6 @@ public void testFailureToAndFromXContent() throws IOException { public static void assertBulkItemResponse(BulkItemResponse expected, BulkItemResponse actual) { assertEquals(expected.getItemId(), actual.getItemId()); assertEquals(expected.getIndex(), actual.getIndex()); - assertEquals(expected.getType(), actual.getType()); assertEquals(expected.getId(), actual.getId()); assertEquals(expected.getOpType(), actual.getOpType()); assertEquals(expected.getVersion(), actual.getVersion()); @@ -144,7 +142,6 @@ public static void assertBulkItemResponse(BulkItemResponse expected, BulkItemRes BulkItemResponse.Failure actualFailure = actual.getFailure(); assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); - assertEquals(expectedFailure.getType(), actualFailure.getType()); 
assertEquals(expectedFailure.getId(), actualFailure.getId()); assertEquals(expectedFailure.getMessage(), actualFailure.getMessage()); assertEquals(expectedFailure.getStatus(), actualFailure.getStatus()); diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkPrimaryExecutionContextTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkPrimaryExecutionContextTests.java index b98bdb2e3e40d..5159135a22618 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkPrimaryExecutionContextTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkPrimaryExecutionContextTests.java @@ -85,16 +85,16 @@ private BulkShardRequest generateRandomRequest() { final DocWriteRequest request; switch (randomFrom(DocWriteRequest.OpType.values())) { case INDEX: - request = new IndexRequest("index", "_doc", "id_" + i); + request = new IndexRequest("index").id("id_" + i); break; case CREATE: - request = new IndexRequest("index", "_doc", "id_" + i).create(true); + request = new IndexRequest("index").id("id_" + i).create(true); break; case UPDATE: - request = new UpdateRequest("index", "_doc", "id_" + i); + request = new UpdateRequest("index", "id_" + i); break; case DELETE: - request = new DeleteRequest("index", "_doc", "id_" + i); + request = new DeleteRequest("index", "id_" + i); break; default: throw new AssertionError("unknown type"); @@ -139,7 +139,7 @@ public void testTranslogLocation() { } break; case UPDATE: - context.setRequestToExecute(new IndexRequest(current.index(), current.type(), current.id())); + context.setRequestToExecute(new IndexRequest(current.index()).id(current.id())); if (failure) { result = new Engine.IndexResult(new OpenSearchException("bla"), 1, 1, 1); } else { diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkRequestModifierTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkRequestModifierTests.java index ce35815a296e0..e7e1166eb57fa 100644 --- 
a/server/src/test/java/org/opensearch/action/bulk/BulkRequestModifierTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkRequestModifierTests.java @@ -58,7 +58,7 @@ public void testBulkRequestModifier() { int numRequests = scaledRandomIntBetween(8, 64); BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < numRequests; i++) { - bulkRequest.add(new IndexRequest("_index", "_type", String.valueOf(i)).source("{}", XContentType.JSON)); + bulkRequest.add(new IndexRequest("_index").id(String.valueOf(i)).source("{}", XContentType.JSON)); } CaptureActionListener actionListener = new CaptureActionListener(); TransportBulkAction.BulkRequestModifier bulkRequestModifier = new TransportBulkAction.BulkRequestModifier(bulkRequest); @@ -87,7 +87,6 @@ public void testBulkRequestModifier() { BulkItemResponse item = bulkResponse.getItems()[j]; assertThat(item.isFailed(), is(true)); assertThat(item.getFailure().getIndex(), equalTo("_index")); - assertThat(item.getFailure().getType(), equalTo("_type")); assertThat(item.getFailure().getId(), equalTo(String.valueOf(j))); assertThat(item.getFailure().getMessage(), equalTo("java.lang.RuntimeException")); } else { @@ -99,7 +98,7 @@ public void testBulkRequestModifier() { public void testPipelineFailures() { BulkRequest originalBulkRequest = new BulkRequest(); for (int i = 0; i < 32; i++) { - originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i))); + originalBulkRequest.add(new IndexRequest("index").id(String.valueOf(i))); } TransportBulkAction.BulkRequestModifier modifier = new TransportBulkAction.BulkRequestModifier(originalBulkRequest); @@ -128,15 +127,7 @@ public void onFailure(Exception e) {} List originalResponses = new ArrayList<>(); for (DocWriteRequest actionRequest : bulkRequest.requests()) { IndexRequest indexRequest = (IndexRequest) actionRequest; - IndexResponse indexResponse = new IndexResponse( - new ShardId("index", "_na_", 0), - indexRequest.type(), - indexRequest.id(), - 1, - 
17, - 1, - true - ); + IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.id(), 1, 17, 1, true); originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType(), indexResponse)); } bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[originalResponses.size()]), 0)); @@ -150,7 +141,7 @@ public void onFailure(Exception e) {} public void testNoFailures() { BulkRequest originalBulkRequest = new BulkRequest(); for (int i = 0; i < 32; i++) { - originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i))); + originalBulkRequest.add(new IndexRequest("index").id(String.valueOf(i))); } TransportBulkAction.BulkRequestModifier modifier = new TransportBulkAction.BulkRequestModifier(originalBulkRequest); diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkRequestParserTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkRequestParserTests.java index b7ba887a0f1e2..239bb19c5f6ad 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkRequestParserTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkRequestParserTests.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.Matchers; @@ -50,7 +49,7 @@ public void testIndexRequest() throws IOException { BytesArray request = new BytesArray("{ \"index\":{ \"_id\": \"bar\" } }\n{}\n"); BulkRequestParser parser = new BulkRequestParser(randomBoolean()); final AtomicBoolean parsed = new AtomicBoolean(); - parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, indexRequest -> { + parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> { 
assertFalse(parsed.get()); assertEquals("foo", indexRequest.index()); assertEquals("bar", indexRequest.id()); @@ -68,7 +67,7 @@ public void testIndexRequest() throws IOException { true, false, XContentType.JSON, - indexRequest -> { assertTrue(indexRequest.isRequireAlias()); }, + (indexRequest, type) -> { assertTrue(indexRequest.isRequireAlias()); }, req -> fail(), req -> fail() ); @@ -83,7 +82,7 @@ public void testIndexRequest() throws IOException { null, false, XContentType.JSON, - indexRequest -> { assertTrue(indexRequest.isRequireAlias()); }, + (indexRequest, type) -> { assertTrue(indexRequest.isRequireAlias()); }, req -> fail(), req -> fail() ); @@ -98,7 +97,7 @@ public void testIndexRequest() throws IOException { true, false, XContentType.JSON, - indexRequest -> { assertFalse(indexRequest.isRequireAlias()); }, + (indexRequest, type) -> { assertFalse(indexRequest.isRequireAlias()); }, req -> fail(), req -> fail() ); @@ -108,12 +107,24 @@ public void testDeleteRequest() throws IOException { BytesArray request = new BytesArray("{ \"delete\":{ \"_id\": \"bar\" } }\n"); BulkRequestParser parser = new BulkRequestParser(randomBoolean()); final AtomicBoolean parsed = new AtomicBoolean(); - parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, req -> fail(), req -> fail(), deleteRequest -> { - assertFalse(parsed.get()); - assertEquals("foo", deleteRequest.index()); - assertEquals("bar", deleteRequest.id()); - parsed.set(true); - }); + parser.parse( + request, + "foo", + null, + null, + null, + null, + false, + XContentType.JSON, + (req, type) -> fail(), + req -> fail(), + deleteRequest -> { + assertFalse(parsed.get()); + assertEquals("foo", deleteRequest.index()); + assertEquals("bar", deleteRequest.id()); + parsed.set(true); + } + ); assertTrue(parsed.get()); } @@ -121,7 +132,7 @@ public void testUpdateRequest() throws IOException { BytesArray request = new BytesArray("{ \"update\":{ \"_id\": \"bar\" } }\n{}\n"); BulkRequestParser parser = new 
BulkRequestParser(randomBoolean()); final AtomicBoolean parsed = new AtomicBoolean(); - parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, req -> fail(), updateRequest -> { + parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, (req, type) -> fail(), updateRequest -> { assertFalse(parsed.get()); assertEquals("foo", updateRequest.index()); assertEquals("bar", updateRequest.id()); @@ -139,7 +150,7 @@ public void testUpdateRequest() throws IOException { true, false, XContentType.JSON, - req -> fail(), + (req, type) -> fail(), updateRequest -> { assertTrue(updateRequest.isRequireAlias()); }, req -> fail() ); @@ -154,7 +165,7 @@ public void testUpdateRequest() throws IOException { null, false, XContentType.JSON, - req -> fail(), + (req, type) -> fail(), updateRequest -> { assertTrue(updateRequest.isRequireAlias()); }, req -> fail() ); @@ -169,7 +180,7 @@ public void testUpdateRequest() throws IOException { true, false, XContentType.JSON, - req -> fail(), + (req, type) -> fail(), updateRequest -> { assertFalse(updateRequest.isRequireAlias()); }, req -> fail() ); @@ -189,7 +200,7 @@ public void testBarfOnLackOfTrailingNewline() { null, false, XContentType.JSON, - indexRequest -> fail(), + (indexRequest, type) -> fail(), req -> fail(), req -> fail() ) @@ -203,24 +214,34 @@ public void testFailOnExplicitIndex() { IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> parser.parse(request, null, null, null, null, null, false, XContentType.JSON, req -> fail(), req -> fail(), req -> fail()) + () -> parser.parse( + request, + null, + null, + null, + null, + null, + false, + XContentType.JSON, + (req, type) -> fail(), + req -> fail(), + req -> fail() + ) ); assertEquals("explicit index in bulk is not allowed", ex.getMessage()); } - public void testTypeWarning() throws IOException { + public void testTypesStillParsedForBulkMonitoring() throws IOException { BytesArray request = new BytesArray("{ 
\"index\":{ \"_type\": \"quux\", \"_id\": \"bar\" } }\n{}\n"); - BulkRequestParser parser = new BulkRequestParser(true); + BulkRequestParser parser = new BulkRequestParser(false); final AtomicBoolean parsed = new AtomicBoolean(); - parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, indexRequest -> { + parser.parse(request, "foo", null, null, null, null, false, XContentType.JSON, (indexRequest, type) -> { assertFalse(parsed.get()); assertEquals("foo", indexRequest.index()); assertEquals("bar", indexRequest.id()); parsed.set(true); }, req -> fail(), req -> fail()); assertTrue(parsed.get()); - - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testParseDeduplicatesParameterStrings() throws IOException { @@ -230,7 +251,19 @@ public void testParseDeduplicatesParameterStrings() throws IOException { ); BulkRequestParser parser = new BulkRequestParser(randomBoolean()); final List indexRequests = new ArrayList<>(); - parser.parse(request, null, null, null, null, null, true, XContentType.JSON, indexRequests::add, req -> fail(), req -> fail()); + parser.parse( + request, + null, + null, + null, + null, + null, + true, + XContentType.JSON, + (indexRequest, type) -> indexRequests.add(indexRequest), + req -> fail(), + req -> fail() + ); assertThat(indexRequests, Matchers.hasSize(2)); final IndexRequest first = indexRequests.get(0); final IndexRequest second = indexRequests.get(1); diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java index f58567b85be3b..9fd57a78d2097 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java @@ -47,7 +47,6 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; -import 
org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.script.Script; import org.opensearch.test.OpenSearchTestCase; @@ -76,12 +75,10 @@ public void testSimpleBulk1() throws Exception { assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }"))); assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class)); assertThat(((IndexRequest) bulkRequest.requests().get(2)).source(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }"))); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulkWithCarriageReturn() throws Exception { - String bulkAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n{ \"field1\" : \"value1\" }\r\n"; + String bulkAction = "{ \"index\":{\"_index\":\"test\",\"_id\":\"1\"} }\r\n{ \"field1\" : \"value1\" }\r\n"; BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(1)); @@ -92,8 +89,6 @@ public void testSimpleBulkWithCarriageReturn() throws Exception { XContentType.JSON ).v2(); assertEquals("value1", sourceMap.get("field1")); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk2() throws Exception { @@ -119,7 +114,6 @@ public void testSimpleBulk4() throws Exception { assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2)); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}")); assertThat(bulkRequest.requests().get(1).id(), equalTo("0")); - assertThat(bulkRequest.requests().get(1).type(), equalTo("type1")); assertThat(bulkRequest.requests().get(1).index(), equalTo("index1")); Script 
script = ((UpdateRequest) bulkRequest.requests().get(1)).script(); assertThat(script, notNullValue()); @@ -130,30 +124,26 @@ public void testSimpleBulk4() throws Exception { assertThat(scriptParams.size(), equalTo(1)); assertThat(scriptParams.get("param1"), equalTo(1)); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}")); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkAllowExplicitIndex() throws Exception { String bulkAction1 = copyToStringFromClasspath("/org/opensearch/action/bulk/simple-bulk.json"); Exception ex = expectThrows( Exception.class, - () -> new BulkRequest().add(new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON) + () -> new BulkRequest().add(new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, false, XContentType.JSON) ); assertEquals("explicit index in bulk is not allowed", ex.getMessage()); String bulkAction = copyToStringFromClasspath("/org/opensearch/action/bulk/simple-bulk5.json"); - new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); + new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", false, XContentType.JSON); } public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); List> requests = new ArrayList<>(); - requests.add(new IndexRequest("test", "test", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); - requests.add(new UpdateRequest("test", "test", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")); - requests.add(new DeleteRequest("test", "test", "id")); + requests.add(new IndexRequest("test").id("id").source(Requests.INDEX_CONTENT_TYPE, 
"field", "value")); + requests.add(new UpdateRequest("test", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")); + requests.add(new DeleteRequest("test", "id")); bulkRequest.add(requests); assertThat(bulkRequest.requests().size(), equalTo(3)); assertThat(bulkRequest.requests().get(0), instanceOf(IndexRequest.class)); @@ -169,8 +159,6 @@ public void testSimpleBulk6() throws Exception { () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON) ); assertThat(exc.getMessage(), containsString("Unknown key for a VALUE_STRING in [hello]")); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk7() throws Exception { @@ -184,8 +172,6 @@ public void testSimpleBulk7() throws Exception { exc.getMessage(), containsString("Malformed action/metadata line [5], expected a simple value for field [_unknown] but found [START_ARRAY]") ); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk8() throws Exception { @@ -196,8 +182,6 @@ public void testSimpleBulk8() throws Exception { () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON) ); assertThat(exc.getMessage(), containsString("Action/metadata line [3] contains an unknown parameter [_foo]")); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk9() throws Exception { @@ -218,13 +202,10 @@ public void testSimpleBulk10() throws Exception { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(9)); - // This test's JSON contains outdated references to types - 
assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkActionShouldNotContainArray() throws Exception { - String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_type\":\"type1\",\"_id\":\"1\"} }\r\n" - + "{ \"field1\" : \"value1\" }\r\n"; + String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_id\":\"1\"} }\r\n" + "{ \"field1\" : \"value1\" }\r\n"; BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, @@ -270,12 +251,12 @@ public void testBulkEmptyObject() throws Exception { public void testBulkRequestWithRefresh() throws Exception { BulkRequest bulkRequest = new BulkRequest(); // We force here a "id is missing" validation error - bulkRequest.add(new DeleteRequest("index", "type", null).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); + bulkRequest.add(new DeleteRequest("index", null).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); // We force here a "type is missing" validation error - bulkRequest.add(new DeleteRequest("index", "", "id")); - bulkRequest.add(new DeleteRequest("index", "type", "id").setRefreshPolicy(RefreshPolicy.IMMEDIATE)); - bulkRequest.add(new UpdateRequest("index", "type", "id").doc("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); - bulkRequest.add(new IndexRequest("index", "type", "id").source("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); + bulkRequest.add(new DeleteRequest("index", "id")); + bulkRequest.add(new DeleteRequest("index", "id").setRefreshPolicy(RefreshPolicy.IMMEDIATE)); + bulkRequest.add(new UpdateRequest("index", "id").doc("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); + bulkRequest.add(new IndexRequest("index").id("id").source("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); ActionRequestValidationException validate = bulkRequest.validate(); assertThat(validate, notNullValue()); assertThat(validate.validationErrors(), 
not(empty())); @@ -284,7 +265,6 @@ public void testBulkRequestWithRefresh() throws Exception { contains( "RefreshPolicy is not supported on an item request. Set it on the BulkRequest instead.", "id is missing", - "type is missing", "RefreshPolicy is not supported on an item request. Set it on the BulkRequest instead.", "RefreshPolicy is not supported on an item request. Set it on the BulkRequest instead.", "RefreshPolicy is not supported on an item request. Set it on the BulkRequest instead." @@ -295,8 +275,8 @@ public void testBulkRequestWithRefresh() throws Exception { // issue 15120 public void testBulkNoSource() throws Exception { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new UpdateRequest("index", "type", "id")); - bulkRequest.add(new IndexRequest("index", "type", "id")); + bulkRequest.add(new UpdateRequest("index", "id")); + bulkRequest.add(new IndexRequest("index").id("id")); ActionRequestValidationException validate = bulkRequest.validate(); assertThat(validate, notNullValue()); assertThat(validate.validationErrors(), not(empty())); @@ -318,7 +298,6 @@ public void testSmileIsSupported() throws IOException { builder.startObject(); builder.startObject("index"); builder.field("_index", "index"); - builder.field("_type", "type"); builder.field("_id", "test"); builder.endObject(); builder.endObject(); @@ -334,19 +313,16 @@ public void testSmileIsSupported() throws IOException { } BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(data, null, null, xContentType); + bulkRequest.add(data, null, xContentType); assertEquals(1, bulkRequest.requests().size()); DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); assertEquals(DocWriteRequest.OpType.INDEX, docWriteRequest.opType()); assertEquals("index", docWriteRequest.index()); - assertEquals("type", docWriteRequest.type()); assertEquals("test", docWriteRequest.id()); assertThat(docWriteRequest, instanceOf(IndexRequest.class)); IndexRequest request = (IndexRequest) 
docWriteRequest; assertEquals(1, request.sourceAsMap().size()); assertEquals("value", request.sourceAsMap().get("field")); - // This test's content contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException { @@ -357,7 +333,6 @@ public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException builder.startObject(); builder.startObject("update"); builder.field("_index", "index"); - builder.field("_type", "type"); builder.field("_id", "id"); builder.field("if_seq_no", 1L); builder.field("if_primary_term", 100L); @@ -372,7 +347,6 @@ public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException values.put("if_seq_no", 1L); values.put("if_primary_term", 100L); values.put("_index", "index"); - values.put("_type", "type"); builder.field("upsert", values); builder.endObject(); } @@ -380,10 +354,8 @@ public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException data = out.bytes(); } BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(data, null, null, xContentType); + bulkRequest.add(data, null, xContentType); assertThat(bulkRequest.validate().validationErrors(), contains("upsert requests don't support `if_seq_no` and `if_primary_term`")); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkTerminatedByNewline() throws Exception { @@ -404,7 +376,5 @@ public void testBulkTerminatedByNewline() throws Exception { XContentType.JSON ); assertEquals(3, bulkRequestWithNewLine.numberOfActions()); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } } diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkResponseTests.java index 55fca8fc736db..e768d66ee04ce 100644 
--- a/server/src/test/java/org/opensearch/action/bulk/BulkResponseTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkResponseTests.java @@ -86,17 +86,16 @@ public void testToAndFromXContent() throws IOException { expectedBulkItems[i] = new BulkItemResponse(i, opType, randomDocWriteResponses.v2()); } else { String index = randomAlphaOfLength(5); - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); Tuple failures = randomExceptions(); Exception bulkItemCause = (Exception) failures.v1(); - bulkItems[i] = new BulkItemResponse(i, opType, new BulkItemResponse.Failure(index, type, id, bulkItemCause)); + bulkItems[i] = new BulkItemResponse(i, opType, new BulkItemResponse.Failure(index, id, bulkItemCause)); expectedBulkItems[i] = new BulkItemResponse( i, opType, - new BulkItemResponse.Failure(index, type, id, failures.v2(), ExceptionsHelper.status(bulkItemCause)) + new BulkItemResponse.Failure(index, id, failures.v2(), ExceptionsHelper.status(bulkItemCause)) ); } } diff --git a/server/src/test/java/org/opensearch/action/bulk/RetryTests.java b/server/src/test/java/org/opensearch/action/bulk/RetryTests.java index 8a66e0cf6f751..d3280ede6ce15 100644 --- a/server/src/test/java/org/opensearch/action/bulk/RetryTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/RetryTests.java @@ -87,11 +87,11 @@ public void tearDown() throws Exception { private BulkRequest createBulkRequest() { BulkRequest request = new BulkRequest(); - request.add(new UpdateRequest("shop", "products", "1")); - request.add(new UpdateRequest("shop", "products", "2")); - request.add(new UpdateRequest("shop", "products", "3")); - request.add(new UpdateRequest("shop", "products", "4")); - request.add(new UpdateRequest("shop", "products", "5")); + request.add(new UpdateRequest("shop", "1")); + request.add(new UpdateRequest("shop", "2")); + request.add(new UpdateRequest("shop", "3")); + request.add(new UpdateRequest("shop", "4")); + request.add(new 
UpdateRequest("shop", "5")); return request; } @@ -238,18 +238,14 @@ public void bulk(BulkRequest request, ActionListener listener) { } private BulkItemResponse successfulResponse() { - return new BulkItemResponse( - 1, - OpType.DELETE, - new DeleteResponse(new ShardId("test", "test", 0), "_doc", "test", 0, 0, 0, false) - ); + return new BulkItemResponse(1, OpType.DELETE, new DeleteResponse(new ShardId("test", "test", 0), "test", 0, 0, 0, false)); } private BulkItemResponse failedResponse() { return new BulkItemResponse( 1, OpType.INDEX, - new BulkItemResponse.Failure("test", "test", "1", new OpenSearchRejectedExecutionException("pool full")) + new BulkItemResponse.Failure("test", "1", new OpenSearchRejectedExecutionException("pool full")) ); } } diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index b2f6ce885d242..32e9dd44008cd 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -79,7 +79,7 @@ public void testNonExceptional() { bulkRequest.add(new IndexRequest(randomAlphaOfLength(5))); bulkRequest.add(new IndexRequest(randomAlphaOfLength(5))); bulkRequest.add(new DeleteRequest(randomAlphaOfLength(5))); - bulkRequest.add(new UpdateRequest(randomAlphaOfLength(5), randomAlphaOfLength(5), randomAlphaOfLength(5))); + bulkRequest.add(new UpdateRequest(randomAlphaOfLength(5), randomAlphaOfLength(5))); // Test emulating auto_create_index=false indicesThatCannotBeCreatedTestCase(emptySet(), bulkRequest, null); // Test emulating auto_create_index=true @@ -95,7 +95,7 @@ public void testAllFail() { bulkRequest.add(new IndexRequest("no")); bulkRequest.add(new IndexRequest("can't")); bulkRequest.add(new 
DeleteRequest("do").version(0).versionType(VersionType.EXTERNAL)); - bulkRequest.add(new UpdateRequest("nothin", randomAlphaOfLength(5), randomAlphaOfLength(5))); + bulkRequest.add(new UpdateRequest("nothin", randomAlphaOfLength(5))); indicesThatCannotBeCreatedTestCase( new HashSet<>(Arrays.asList("no", "can't", "do", "nothin")), bulkRequest, diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java index 8a804c5d7519e..4b98870422ce8 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java @@ -290,7 +290,7 @@ public void setupAction() { public void testIngestSkipped() throws Exception { BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("index").id("id"); indexRequest.source(emptyMap()); bulkRequest.add(indexRequest); action.execute(null, bulkRequest, ActionListener.wrap(response -> {}, exception -> { throw new AssertionError(exception); })); @@ -299,7 +299,7 @@ public void testIngestSkipped() throws Exception { } public void testSingleItemBulkActionIngestSkipped() throws Exception { - IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("index").id("id"); indexRequest.source(emptyMap()); singleItemBulkWriteAction.execute( null, @@ -313,10 +313,10 @@ public void testSingleItemBulkActionIngestSkipped() throws Exception { public void testIngestLocal() throws Exception { Exception exception = new Exception("fake exception"); BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest1 = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest1 = new IndexRequest("index").id("id"); indexRequest1.source(emptyMap()); 
indexRequest1.setPipeline("testpipeline"); - IndexRequest indexRequest2 = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest2 = new IndexRequest("index").id("id"); indexRequest2.source(emptyMap()); indexRequest2.setPipeline("testpipeline"); bulkRequest.add(indexRequest1); @@ -360,7 +360,7 @@ public void testIngestLocal() throws Exception { public void testSingleItemBulkActionIngestLocal() throws Exception { Exception exception = new Exception("fake exception"); - IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("index").id("id"); indexRequest.source(emptyMap()); indexRequest.setPipeline("testpipeline"); AtomicBoolean responseCalled = new AtomicBoolean(false); @@ -444,7 +444,7 @@ public void testIngestSystemLocal() throws Exception { public void testIngestForward() throws Exception { localIngest = false; BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("index").id("id"); indexRequest.source(emptyMap()); indexRequest.setPipeline("testpipeline"); bulkRequest.add(indexRequest); @@ -485,7 +485,7 @@ public void testIngestForward() throws Exception { public void testSingleItemBulkActionIngestForward() throws Exception { localIngest = false; - IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("index").id("id"); indexRequest.source(emptyMap()); indexRequest.setPipeline("testpipeline"); IndexResponse indexResponse = mock(IndexResponse.class); @@ -527,11 +527,11 @@ public void testSingleItemBulkActionIngestForward() throws Exception { } public void testUseDefaultPipeline() throws Exception { - validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id")); + validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE).id("id")); } public void testUseDefaultPipelineWithAlias() throws Exception { - 
validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS, "type", "id")); + validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS).id("id")); } public void testUseDefaultPipelineWithBulkUpsert() throws Exception { @@ -547,15 +547,14 @@ public void testUseDefaultPipelineWithBulkUpsertWithAlias() throws Exception { private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexName, String updateRequestIndexName) throws Exception { Exception exception = new Exception("fake exception"); BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest1 = new IndexRequest(indexRequestIndexName, "type", "id1").source(emptyMap()); - IndexRequest indexRequest2 = new IndexRequest(indexRequestIndexName, "type", "id2").source(emptyMap()); - IndexRequest indexRequest3 = new IndexRequest(indexRequestIndexName, "type", "id3").source(emptyMap()); - UpdateRequest upsertRequest = new UpdateRequest(updateRequestIndexName, "type", "id1").upsert(indexRequest1) - .script(mockScript("1")); - UpdateRequest docAsUpsertRequest = new UpdateRequest(updateRequestIndexName, "type", "id2").doc(indexRequest2).docAsUpsert(true); + IndexRequest indexRequest1 = new IndexRequest(indexRequestIndexName).id("id1").source(emptyMap()); + IndexRequest indexRequest2 = new IndexRequest(indexRequestIndexName).id("id2").source(emptyMap()); + IndexRequest indexRequest3 = new IndexRequest(indexRequestIndexName).id("id3").source(emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest(updateRequestIndexName, "id1").upsert(indexRequest1).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest(updateRequestIndexName, "id2").doc(indexRequest2).docAsUpsert(true); // this test only covers the mechanics that scripted bulk upserts will execute a default pipeline. However, in practice scripted // bulk upserts with a default pipeline are a bit surprising since the script executes AFTER the pipeline. 
- UpdateRequest scriptedUpsert = new UpdateRequest(updateRequestIndexName, "type", "id2").upsert(indexRequest3) + UpdateRequest scriptedUpsert = new UpdateRequest(updateRequestIndexName, "id2").upsert(indexRequest3) .script(mockScript("1")) .scriptedUpsert(true); bulkRequest.add(upsertRequest).add(docAsUpsertRequest).add(scriptedUpsert); @@ -604,7 +603,7 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa public void testDoExecuteCalledTwiceCorrectly() throws Exception { Exception exception = new Exception("fake exception"); - IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("missing_index").id("id"); indexRequest.setPipeline("testpipeline"); indexRequest.source(emptyMap()); AtomicBoolean responseCalled = new AtomicBoolean(false); @@ -644,7 +643,7 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { public void testNotFindDefaultPipelineFromTemplateMatches() { Exception exception = new Exception("fake exception"); - IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("missing_index").id("id"); indexRequest.source(emptyMap()); AtomicBoolean responseCalled = new AtomicBoolean(false); AtomicBoolean failureCalled = new AtomicBoolean(false); @@ -698,7 +697,7 @@ public void testFindDefaultPipelineFromTemplateMatch() { when(metadata.getTemplates()).thenReturn(templateMetadataBuilder.build()); when(metadata.indices()).thenReturn(ImmutableOpenMap.of()); - IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id"); + IndexRequest indexRequest = new IndexRequest("missing_index").id("id"); indexRequest.source(emptyMap()); AtomicBoolean responseCalled = new AtomicBoolean(false); AtomicBoolean failureCalled = new AtomicBoolean(false); diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTests.java 
b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTests.java index 0ce8a2fc1a2ed..5eb395cb05971 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTests.java @@ -168,7 +168,7 @@ public void tearDown() throws Exception { } public void testDeleteNonExistingDocDoesNotCreateIndex() throws Exception { - BulkRequest bulkRequest = new BulkRequest().add(new DeleteRequest("index", "type", "id")); + BulkRequest bulkRequest = new BulkRequest().add(new DeleteRequest("index", "id")); PlainActionFuture future = PlainActionFuture.newFuture(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); @@ -183,9 +183,7 @@ public void testDeleteNonExistingDocDoesNotCreateIndex() throws Exception { } public void testDeleteNonExistingDocExternalVersionCreatesIndex() throws Exception { - BulkRequest bulkRequest = new BulkRequest().add( - new DeleteRequest("index", "type", "id").versionType(VersionType.EXTERNAL).version(0) - ); + BulkRequest bulkRequest = new BulkRequest().add(new DeleteRequest("index", "id").versionType(VersionType.EXTERNAL).version(0)); PlainActionFuture future = PlainActionFuture.newFuture(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); @@ -194,9 +192,7 @@ public void testDeleteNonExistingDocExternalVersionCreatesIndex() throws Excepti } public void testDeleteNonExistingDocExternalGteVersionCreatesIndex() throws Exception { - BulkRequest bulkRequest = new BulkRequest().add( - new DeleteRequest("index2", "type", "id").versionType(VersionType.EXTERNAL_GTE).version(0) - ); + BulkRequest bulkRequest = new BulkRequest().add(new DeleteRequest("index2", "id").versionType(VersionType.EXTERNAL_GTE).version(0)); PlainActionFuture future = PlainActionFuture.newFuture(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); @@ -205,12 +201,10 @@ public void testDeleteNonExistingDocExternalGteVersionCreatesIndex() 
throws Exce } public void testGetIndexWriteRequest() throws Exception { - IndexRequest indexRequest = new IndexRequest("index", "type", "id1").source(emptyMap()); - UpdateRequest upsertRequest = new UpdateRequest("index", "type", "id1").upsert(indexRequest).script(mockScript("1")); - UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "type", "id2").doc(indexRequest).docAsUpsert(true); - UpdateRequest scriptedUpsert = new UpdateRequest("index", "type", "id2").upsert(indexRequest) - .script(mockScript("1")) - .scriptedUpsert(true); + IndexRequest indexRequest = new IndexRequest("index").id("id1").source(emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest("index", "id1").upsert(indexRequest).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "id2").doc(indexRequest).docAsUpsert(true); + UpdateRequest scriptedUpsert = new UpdateRequest("index", "id2").upsert(indexRequest).script(mockScript("1")).scriptedUpsert(true); assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest); assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest); @@ -220,7 +214,7 @@ public void testGetIndexWriteRequest() throws Exception { DeleteRequest deleteRequest = new DeleteRequest("index", "id"); assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest)); - UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1"); + UpdateRequest badUpsertRequest = new UpdateRequest("index", "id1"); assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest)); } diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTookTests.java index e2e4f4dd5daab..713b506f14299 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionTookTests.java @@ -55,7 
+55,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.IndexingPressureService; -import org.opensearch.rest.action.document.RestBulkAction; import org.opensearch.indices.SystemIndices; import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchTestCase; @@ -242,8 +241,6 @@ public void onFailure(Exception e) { } }); - // This test's JSON contains outdated references to types - assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } static class Resolver extends IndexNameExpressionResolver { diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java index 223ef3795d5e5..b1fa20307a12b 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java @@ -109,7 +109,6 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { private IndexMetadata indexMetadata() throws IOException { return IndexMetadata.builder("index") .putMapping( - "_doc", "{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}" ) .settings(idxSettings) @@ -122,8 +121,7 @@ public void testExecuteBulkIndexRequest() throws Exception { BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE) - .create(create); + DocWriteRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -154,7 +152,7 @@ public void 
testExecuteBulkIndexRequest() throws Exception { // Assert that the document actually made it there assertDocCount(shard, 1); - writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(true); + writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(true); primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -186,7 +184,6 @@ public void testExecuteBulkIndexRequest() throws Exception { BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); - assertThat(failure.getType(), equalTo("_doc")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause().getClass(), equalTo(VersionConflictEngineException.class)); assertThat(failure.getCause().getMessage(), containsString("version conflict, document already exists (current version [1])")); @@ -204,7 +201,8 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)]; for (int i = 0; i < items.length; i++) { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id_" + i).source(Requests.INDEX_CONTENT_TYPE) + DocWriteRequest writeRequest = new IndexRequest("index").id("id_" + i) + .source(Requests.INDEX_CONTENT_TYPE) .opType(DocWriteRequest.OpType.INDEX); items[i] = new BulkItemRequest(i, writeRequest); } @@ -236,7 +234,6 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { BulkItemResponse response = result.finalResponseIfSuccessful.getResponses()[i]; assertThat(response.getItemId(), equalTo(i)); assertThat(response.getIndex(), equalTo("index")); - assertThat(response.getType(), equalTo("_doc")); assertThat(response.getId(), equalTo("id_" + i)); assertThat(response.getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); if (response.getItemId() == rejectItem.id()) 
{ @@ -266,11 +263,7 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source( - Requests.INDEX_CONTENT_TYPE, - "foo", - "bar" - ); + DocWriteRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -292,19 +285,12 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { // Pretend the mappings haven't made it to the node yet BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); AtomicInteger updateCalled = new AtomicInteger(); - TransportShardBulkAction.executeBulkItemRequest( - context, - null, - threadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> { - // There should indeed be a mapping update - assertNotNull(update); - updateCalled.incrementAndGet(); - listener.onResponse(null); - }, - listener -> listener.onResponse(null), - ASSERTING_DONE_LISTENER - ); + TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, (update, shardId, listener) -> { + // There should indeed be a mapping update + assertNotNull(update); + updateCalled.incrementAndGet(); + listener.onResponse(null); + }, listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER); assertTrue(context.isInitial()); assertTrue(context.hasMoreOperationsToExecute()); @@ -321,7 +307,7 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { context, null, threadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> fail("should not have had to update the mappings"), + (update, shardId, listener) -> fail("should not have had to 
update the mappings"), listener -> {}, ASSERTING_DONE_LISTENER ); @@ -344,11 +330,7 @@ public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Ex IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source( - Requests.INDEX_CONTENT_TYPE, - "foo", - "bar" - ); + DocWriteRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -394,7 +376,6 @@ public void onFailure(final Exception e) { assertThat(primaryResponse.getFailureMessage(), containsString("some kind of exception")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); - assertThat(failure.getType(), equalTo("_doc")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause(), equalTo(err)); @@ -405,7 +386,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new DeleteRequest("index", "_doc", "id"); + DocWriteRequest writeRequest = new DeleteRequest("index", "id"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -444,7 +425,6 @@ public void testExecuteBulkDeleteRequest() throws Exception { assertThat(response.getResult(), equalTo(DocWriteResponse.Result.NOT_FOUND)); assertThat(response.getShardId(), equalTo(shard.shardId())); assertThat(response.getIndex(), equalTo("index")); - assertThat(response.getType(), equalTo("_doc")); assertThat(response.getId(), equalTo("id")); assertThat(response.getVersion(), equalTo(1L)); assertThat(response.getSeqNo(), equalTo(0L)); @@ -453,7 
+433,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { // Now do the same after indexing the document, it should now find and delete the document indexDoc(shard, "_doc", "id", "{}"); - writeRequest = new DeleteRequest("index", "_doc", "id"); + writeRequest = new DeleteRequest("index", "id"); items[0] = new BulkItemRequest(0, writeRequest); bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -492,7 +472,6 @@ public void testExecuteBulkDeleteRequest() throws Exception { assertThat(response.getResult(), equalTo(DocWriteResponse.Result.DELETED)); assertThat(response.getShardId(), equalTo(shard.shardId())); assertThat(response.getIndex(), equalTo("index")); - assertThat(response.getType(), equalTo("_doc")); assertThat(response.getId(), equalTo("id")); assertThat(response.getVersion(), equalTo(3L)); assertThat(response.getSeqNo(), equalTo(2L)); @@ -503,14 +482,10 @@ public void testExecuteBulkDeleteRequest() throws Exception { } public void testNoopUpdateRequest() throws Exception { - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "_doc", "id", 0, 2, 1, DocWriteResponse.Result.NOOP); + DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "id", 0, 2, 1, DocWriteResponse.Result.NOOP); IndexShard shard = mock(IndexShard.class); @@ -556,14 +531,10 @@ public void testNoopUpdateRequest() throws Exception { public void testUpdateRequestWithFailure() throws Exception { IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY); - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + 
DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); Exception err = new OpenSearchException("I'm dead <(x.x)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); @@ -610,7 +581,6 @@ public void testUpdateRequestWithFailure() throws Exception { assertThat(primaryResponse.getFailureMessage(), containsString("I'm dead <(x.x)>")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); - assertThat(failure.getType(), equalTo("_doc")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause(), equalTo(err)); assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); @@ -618,14 +588,10 @@ public void testUpdateRequestWithFailure() throws Exception { public void testUpdateRequestWithConflictFailure() throws Exception { IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY); - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>"); Engine.IndexResult indexResult 
= new Engine.IndexResult(err, 0, 0, 0); @@ -670,7 +636,6 @@ public void testUpdateRequestWithConflictFailure() throws Exception { assertThat(primaryResponse.getFailureMessage(), containsString("I'm conflicted <(;_;)>")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); - assertThat(failure.getType(), equalTo("_doc")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause(), equalTo(err)); assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT)); @@ -678,14 +643,10 @@ public void testUpdateRequestWithConflictFailure() throws Exception { public void testUpdateRequestWithSuccess() throws Exception { IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY); - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); boolean created = randomBoolean(); Translog.Location resultLocation = new Translog.Location(42, 42, 42); @@ -739,21 +700,17 @@ public void testUpdateRequestWithSuccess() throws Exception { public void testUpdateWithDelete() throws Exception { IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY); - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); 
- DeleteRequest updateResponse = new DeleteRequest("index", "_doc", "id"); + DeleteRequest updateResponse = new DeleteRequest("index", "id"); boolean found = randomBoolean(); Translog.Location resultLocation = new Translog.Location(42, 42, 42); final long resultSeqNo = 13; Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); + when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); when(shard.indexSettings()).thenReturn(indexSettings); when(shard.shardId()).thenReturn(shardId); @@ -796,11 +753,7 @@ public void testUpdateWithDelete() throws Exception { } public void testFailureDuringUpdateProcessing() throws Exception { - DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc( - Requests.INDEX_CONTENT_TYPE, - "field", - "value" - ); + DocWriteRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); IndexShard shard = mock(IndexShard.class); @@ -833,7 +786,6 @@ public void testFailureDuringUpdateProcessing() throws Exception { assertThat(primaryResponse.getFailureMessage(), containsString("oops")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); assertThat(failure.getIndex(), equalTo("index")); - assertThat(failure.getType(), equalTo("_doc")); assertThat(failure.getId(), equalTo("id")); assertThat(failure.getCause(), equalTo(err)); assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); @@ -844,7 +796,8 @@ public void testTranslogPositionToSync() throws Exception { BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)]; for (int i = 0; i < items.length; i++) { - DocWriteRequest writeRequest = new 
IndexRequest("index", "_doc", "id_" + i).source(Requests.INDEX_CONTENT_TYPE) + DocWriteRequest writeRequest = new IndexRequest("index").id("id_" + i) + .source(Requests.INDEX_CONTENT_TYPE) .opType(DocWriteRequest.OpType.INDEX); items[i] = new BulkItemRequest(i, writeRequest); } @@ -881,14 +834,14 @@ public void testTranslogPositionToSync() throws Exception { public void testNoOpReplicationOnPrimaryDocumentFailure() throws Exception { final IndexShard shard = spy(newStartedShard(false)); - BulkItemRequest itemRequest = new BulkItemRequest(0, new IndexRequest("index", "_doc").source(Requests.INDEX_CONTENT_TYPE)); + BulkItemRequest itemRequest = new BulkItemRequest(0, new IndexRequest("index").source(Requests.INDEX_CONTENT_TYPE)); final String failureMessage = "simulated primary failure"; final IOException exception = new IOException(failureMessage); itemRequest.setPrimaryResponse( new BulkItemResponse( 0, randomFrom(DocWriteRequest.OpType.CREATE, DocWriteRequest.OpType.DELETE, DocWriteRequest.OpType.INDEX), - new BulkItemResponse.Failure("index", "_doc", "1", exception, 1L, 1L) + new BulkItemResponse.Failure("index", "1", exception, 1L, 1L) ) ); BulkItemRequest[] itemRequests = new BulkItemRequest[1]; @@ -901,12 +854,12 @@ public void testNoOpReplicationOnPrimaryDocumentFailure() throws Exception { public void testRetries() throws Exception { IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY); - UpdateRequest writeRequest = new UpdateRequest("index", "_doc", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + UpdateRequest writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); // the beating will continue until success has come. 
writeRequest.retryOnConflict(Integer.MAX_VALUE); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>"); Engine.IndexResult conflictedResult = new Engine.IndexResult(err, 0); @@ -1028,7 +981,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception { shard, null, rejectingThreadPool::absoluteTimeInMillis, - (update, shardId, type, listener) -> { + (update, shardId, listener) -> { // There should indeed be a mapping update assertNotNull(update); updateCalled.incrementAndGet(); @@ -1084,7 +1037,7 @@ private void randomlySetIgnoredPrimaryResponse(BulkItemRequest primaryRequest) { new BulkItemResponse( 0, DocWriteRequest.OpType.INDEX, - new IndexResponse(shardId, "_doc", "ignore-primary-response-on-primary", 42, 42, 42, false) + new IndexResponse(shardId, "ignore-primary-response-on-primary", 42, 42, 42, false) ) ); } @@ -1129,7 +1082,7 @@ public Translog.Location getTranslogLocation() { /** Doesn't perform any mapping updates */ public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer { @Override - public void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener) { + public void updateMappings(Mapping update, ShardId shardId, ActionListener listener) { listener.onResponse(null); } } @@ -1143,7 +1096,7 @@ private class ThrowingMappingUpdatePerformer implements MappingUpdatePerformer { } @Override - public void updateMappings(Mapping update, ShardId shardId, String type, ActionListener listener) { + public void updateMappings(Mapping update, ShardId shardId, ActionListener listener) { listener.onFailure(e); } } diff --git 
a/server/src/test/java/org/opensearch/action/delete/DeleteRequestTests.java b/server/src/test/java/org/opensearch/action/delete/DeleteRequestTests.java index acd07b781be0a..0dda8969e7d74 100644 --- a/server/src/test/java/org/opensearch/action/delete/DeleteRequestTests.java +++ b/server/src/test/java/org/opensearch/action/delete/DeleteRequestTests.java @@ -42,23 +42,14 @@ public class DeleteRequestTests extends OpenSearchTestCase { public void testValidation() { { - final DeleteRequest request = new DeleteRequest("index4", "_doc", "0"); + final DeleteRequest request = new DeleteRequest("index4", "0"); final ActionRequestValidationException validate = request.validate(); assertThat(validate, nullValue()); } { - // Empty types are accepted but fail validation - final DeleteRequest request = new DeleteRequest("index4", "", randomBoolean() ? "" : null); - final ActionRequestValidationException validate = request.validate(); - - assertThat(validate, not(nullValue())); - assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); - } - { - // Null types are defaulted - final DeleteRequest request = new DeleteRequest("index4", randomBoolean() ? 
"" : null); + final DeleteRequest request = new DeleteRequest("index4", null); final ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); diff --git a/server/src/test/java/org/opensearch/action/delete/DeleteResponseTests.java b/server/src/test/java/org/opensearch/action/delete/DeleteResponseTests.java index 5b2a1d61614cb..e6c80b8ebdb61 100644 --- a/server/src/test/java/org/opensearch/action/delete/DeleteResponseTests.java +++ b/server/src/test/java/org/opensearch/action/delete/DeleteResponseTests.java @@ -55,21 +55,21 @@ public class DeleteResponseTests extends OpenSearchTestCase { public void testToXContent() { { - DeleteResponse response = new DeleteResponse(new ShardId("index", "index_uuid", 0), "type", "id", 3, 17, 5, true); + DeleteResponse response = new DeleteResponse(new ShardId("index", "index_uuid", 0), "id", 3, 17, 5, true); String output = Strings.toString(response); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":5,\"result\":\"deleted\"," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":5,\"result\":\"deleted\"," + "\"_shards\":null,\"_seq_no\":3,\"_primary_term\":17}", output ); } { - DeleteResponse response = new DeleteResponse(new ShardId("index", "index_uuid", 0), "type", "id", -1, 0, 7, true); + DeleteResponse response = new DeleteResponse(new ShardId("index", "index_uuid", 0), "id", -1, 0, 7, true); response.setForcedRefresh(true); response.setShardInfo(new ReplicationResponse.ShardInfo(10, 5)); String output = Strings.toString(response); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":7,\"result\":\"deleted\"," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":7,\"result\":\"deleted\"," + "\"forced_refresh\":true,\"_shards\":{\"total\":10,\"successful\":5,\"failed\":0}}", output ); @@ -141,19 +141,11 @@ public static Tuple randomDeleteResponse() { Tuple shardInfos = RandomObjects.randomShardInfo(random()); - 
DeleteResponse actual = new DeleteResponse(new ShardId(index, indexUUid, shardId), type, id, seqNo, primaryTerm, version, found); + DeleteResponse actual = new DeleteResponse(new ShardId(index, indexUUid, shardId), id, seqNo, primaryTerm, version, found); actual.setForcedRefresh(forcedRefresh); actual.setShardInfo(shardInfos.v1()); - DeleteResponse expected = new DeleteResponse( - new ShardId(index, INDEX_UUID_NA_VALUE, -1), - type, - id, - seqNo, - primaryTerm, - version, - found - ); + DeleteResponse expected = new DeleteResponse(new ShardId(index, INDEX_UUID_NA_VALUE, -1), id, seqNo, primaryTerm, version, found); expected.setForcedRefresh(forcedRefresh); expected.setShardInfo(shardInfos.v2()); diff --git a/server/src/test/java/org/opensearch/action/explain/ExplainRequestTests.java b/server/src/test/java/org/opensearch/action/explain/ExplainRequestTests.java index 8dda2a94a0a76..6642f5c10522e 100644 --- a/server/src/test/java/org/opensearch/action/explain/ExplainRequestTests.java +++ b/server/src/test/java/org/opensearch/action/explain/ExplainRequestTests.java @@ -68,9 +68,9 @@ public void setUp() throws Exception { public void testSerialize() throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { - ExplainRequest request = new ExplainRequest("index", "type", "id"); + ExplainRequest request = new ExplainRequest("index", "id"); request.fetchSourceContext(new FetchSourceContext(true, new String[] { "field1.*" }, new String[] { "field2.*" })); - request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] { "alias0", "alias1" })); + request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1")); request.preference("the_preference"); request.query(QueryBuilders.termQuery("field", "value")); request.storedFields(new String[] { "field1", "field2" }); @@ -90,7 +90,7 @@ public void testSerialize() throws IOException { public void testValidation() { { - final 
ExplainRequest request = new ExplainRequest("index4", "_doc", "0"); + final ExplainRequest request = new ExplainRequest("index4", "0"); request.query(QueryBuilders.termQuery("field", "value")); final ActionRequestValidationException validate = request.validate(); @@ -99,12 +99,12 @@ public void testValidation() { } { - final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? "" : null, randomBoolean() ? "" : null); + final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? "" : null); request.query(QueryBuilders.termQuery("field", "value")); final ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); - assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); + assertThat(validate.validationErrors(), hasItems("id is missing")); } } } diff --git a/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java index 8b95b06b0ee8b..b6918bd3590a9 100644 --- a/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java @@ -71,14 +71,12 @@ protected Writeable.Reader instanceReader() { @Override protected ExplainResponse createTestInstance() { String index = randomAlphaOfLength(5); - String type = randomAlphaOfLength(5); String id = String.valueOf(randomIntBetween(1, 100)); boolean exist = randomBoolean(); Explanation explanation = randomExplanation(randomExplanation(randomExplanation()), randomExplanation()); String fieldName = randomAlphaOfLength(10); List values = Arrays.asList(randomAlphaOfLengthBetween(3, 10), randomInt(), randomLong(), randomDouble(), randomBoolean()); GetResult getResult = new GetResult( - randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), 0, @@ -89,7 +87,7 @@ protected ExplainResponse 
createTestInstance() { singletonMap(fieldName, new DocumentField(fieldName, values)), null ); - return new ExplainResponse(index, type, id, exist, explanation, getResult); + return new ExplainResponse(index, id, exist, explanation, getResult); } @Override @@ -104,7 +102,6 @@ public void testToXContent() throws IOException { boolean exist = true; Explanation explanation = Explanation.match(1.0f, "description", Collections.emptySet()); GetResult getResult = new GetResult( - null, null, null, 0, @@ -115,7 +112,7 @@ public void testToXContent() throws IOException { singletonMap("field1", new DocumentField("field1", singletonList("value1"))), null ); - ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult); + ExplainResponse response = new ExplainResponse(index, id, exist, explanation, getResult); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); response.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -124,7 +121,6 @@ public void testToXContent() throws IOException { String expectedResponse = ("{\n" + " \"_index\":\"index\",\n" - + " \"_type\":\"type\",\n" + " \"_id\":\"1\",\n" + " \"matched\":true,\n" + " \"explanation\":{\n" diff --git a/server/src/test/java/org/opensearch/action/get/GetRequestTests.java b/server/src/test/java/org/opensearch/action/get/GetRequestTests.java index 91fcf57be8bef..13d12cdc8af87 100644 --- a/server/src/test/java/org/opensearch/action/get/GetRequestTests.java +++ b/server/src/test/java/org/opensearch/action/get/GetRequestTests.java @@ -42,19 +42,19 @@ public class GetRequestTests extends OpenSearchTestCase { public void testValidation() { { - final GetRequest request = new GetRequest("index4", "_doc", "0"); + final GetRequest request = new GetRequest("index4", "0"); final ActionRequestValidationException validate = request.validate(); assertThat(validate, nullValue()); } { - final GetRequest request = new GetRequest("index4", randomBoolean() ? "" : null, randomBoolean() ? 
"" : null); + final GetRequest request = new GetRequest("index4", randomBoolean() ? "" : null); final ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); - assertEquals(2, validate.validationErrors().size()); - assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); + assertEquals(1, validate.validationErrors().size()); + assertThat(validate.validationErrors(), hasItems("id is missing")); } } } diff --git a/server/src/test/java/org/opensearch/action/get/GetResponseTests.java b/server/src/test/java/org/opensearch/action/get/GetResponseTests.java index 108eeda79e173..39b330fa10a7b 100644 --- a/server/src/test/java/org/opensearch/action/get/GetResponseTests.java +++ b/server/src/test/java/org/opensearch/action/get/GetResponseTests.java @@ -108,7 +108,6 @@ public void testToXContent() { GetResponse getResponse = new GetResponse( new GetResult( "index", - "type", "id", 0, 1, @@ -121,17 +120,15 @@ public void testToXContent() { ); String output = Strings.toString(getResponse); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output ); } { - GetResponse getResponse = new GetResponse( - new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null) - ); + GetResponse getResponse = new GetResponse(new GetResult("index", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null)); String output = Strings.toString(getResponse); - assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); + assertEquals("{\"_index\":\"index\",\"_id\":\"id\",\"found\":false}", output); } } @@ -139,7 +136,6 @@ public void testToString() { GetResponse getResponse = new 
GetResponse( new GetResult( "index", - "type", "id", 0, 1, @@ -151,7 +147,7 @@ public void testToString() { ) ); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", getResponse.toString() ); @@ -167,7 +163,7 @@ public void testEqualsAndHashcode() { public void testFromXContentThrowsParsingException() throws IOException { GetResponse getResponse = new GetResponse( - new GetResult(null, null, null, UNASSIGNED_SEQ_NO, 0, randomIntBetween(1, 5), randomBoolean(), null, null, null) + new GetResult(null, null, UNASSIGNED_SEQ_NO, 0, randomIntBetween(1, 5), randomBoolean(), null, null, null) ); XContentType xContentType = randomFrom(XContentType.values()); @@ -175,7 +171,7 @@ public void testFromXContentThrowsParsingException() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { ParsingException exception = expectThrows(ParsingException.class, () -> GetResponse.fromXContent(parser)); - assertEquals("Missing required fields [_index,_type,_id]", exception.getMessage()); + assertEquals("Missing required fields [_index,_id]", exception.getMessage()); } } diff --git a/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java index 54432fa2fb9fb..cf26117210dfb 100644 --- a/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java @@ -71,9 +71,8 @@ public void testAddWithInvalidKey() throws IOException { final MultiGetRequest mgr = new MultiGetRequest(); final ParsingException e = expectThrows(ParsingException.class, () -> { final String defaultIndex = 
randomAlphaOfLength(5); - final String defaultType = randomAlphaOfLength(3); final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; - mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); + mgr.add(defaultIndex, null, fetchSource, null, parser, true); }); assertThat(e.toString(), containsString("unknown key [doc] for a START_ARRAY, expected [docs] or [ids]")); } @@ -95,9 +94,8 @@ public void testUnexpectedField() throws IOException { final MultiGetRequest mgr = new MultiGetRequest(); final ParsingException e = expectThrows(ParsingException.class, () -> { final String defaultIndex = randomAlphaOfLength(5); - final String defaultType = randomAlphaOfLength(3); final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; - mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); + mgr.add(defaultIndex, null, fetchSource, null, parser, true); }); assertThat(e.toString(), containsString("unexpected token [START_OBJECT], expected [FIELD_NAME] or [START_ARRAY]")); } @@ -118,7 +116,7 @@ public void testAddWithValidSourceValueIsAccepted() throws Exception { ); MultiGetRequest multiGetRequest = new MultiGetRequest(); - multiGetRequest.add(randomAlphaOfLength(5), randomAlphaOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); + multiGetRequest.add(randomAlphaOfLength(5), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); assertEquals(2, multiGetRequest.getItems().size()); } @@ -130,7 +128,7 @@ public void testXContentSerialization() throws IOException { BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { MultiGetRequest actual = new MultiGetRequest(); - actual.add(null, null, null, null, null, parser, true); + actual.add(null, null, null, null, parser, true); assertThat(parser.nextToken(), nullValue()); assertThat(actual.items.size(), 
equalTo(expected.items.size())); @@ -147,7 +145,7 @@ private MultiGetRequest createTestInstance() { int numItems = randomIntBetween(0, 128); MultiGetRequest request = new MultiGetRequest(); for (int i = 0; i < numItems; i++) { - MultiGetRequest.Item item = new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4)); + MultiGetRequest.Item item = new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4)); if (randomBoolean()) { item.version(randomNonNegativeLong()); } diff --git a/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java index 36960ac2f322d..a167f41d41b66 100644 --- a/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java +++ b/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java @@ -64,7 +64,6 @@ public void testFromXContent() throws IOException { MultiGetItemResponse expectedItem = expected.getResponses()[i]; MultiGetItemResponse actualItem = parsed.getResponses()[i]; assertThat(actualItem.getIndex(), equalTo(expectedItem.getIndex())); - assertThat(actualItem.getType(), equalTo(expectedItem.getType())); assertThat(actualItem.getId(), equalTo(expectedItem.getId())); if (expectedItem.isFailed()) { assertThat(actualItem.isFailed(), is(true)); @@ -84,18 +83,7 @@ private static MultiGetResponse createTestInstance() { if (randomBoolean()) { items[i] = new MultiGetItemResponse( new GetResponse( - new GetResult( - randomAlphaOfLength(4), - randomAlphaOfLength(4), - randomAlphaOfLength(4), - 0, - 1, - randomNonNegativeLong(), - true, - null, - null, - null - ) + new GetResult(randomAlphaOfLength(4), randomAlphaOfLength(4), 0, 1, randomNonNegativeLong(), true, null, null, null) ), null ); @@ -103,7 +91,6 @@ private static MultiGetResponse createTestInstance() { items[i] = new MultiGetItemResponse( null, new MultiGetResponse.Failure( - randomAlphaOfLength(4), 
randomAlphaOfLength(4), randomAlphaOfLength(4), new RuntimeException(randomAlphaOfLength(4)) diff --git a/server/src/test/java/org/opensearch/action/get/MultiGetShardRequestTests.java b/server/src/test/java/org/opensearch/action/get/MultiGetShardRequestTests.java index fc4c7c18d528f..5bf4f50c50c63 100644 --- a/server/src/test/java/org/opensearch/action/get/MultiGetShardRequestTests.java +++ b/server/src/test/java/org/opensearch/action/get/MultiGetShardRequestTests.java @@ -58,11 +58,7 @@ public void testSerialization() throws IOException { MultiGetShardRequest multiGetShardRequest = new MultiGetShardRequest(multiGetRequest, "index", 0); int numItems = iterations(10, 30); for (int i = 0; i < numItems; i++) { - MultiGetRequest.Item item = new MultiGetRequest.Item( - "alias-" + randomAlphaOfLength(randomIntBetween(1, 10)), - "type", - "id-" + i - ); + MultiGetRequest.Item item = new MultiGetRequest.Item("alias-" + randomAlphaOfLength(randomIntBetween(1, 10)), "id-" + i); if (randomBoolean()) { int numFields = randomIntBetween(1, 5); String[] fields = new String[numFields]; @@ -97,7 +93,6 @@ public void testSerialization() throws IOException { MultiGetRequest.Item item = multiGetShardRequest.items.get(i); MultiGetRequest.Item item2 = multiGetShardRequest2.items.get(i); assertThat(item2.index(), equalTo(item.index())); - assertThat(item2.type(), equalTo(item.type())); assertThat(item2.id(), equalTo(item.id())); assertThat(item2.storedFields(), equalTo(item.storedFields())); assertThat(item2.version(), equalTo(item.version())); diff --git a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java index 9141b86ded5a7..1184b05461025 100644 --- a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java +++ b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java @@ -127,7 +127,6 @@ public TaskManager getTaskManager() { 
.put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -153,7 +152,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -225,8 +223,8 @@ public void testTransportMultiGetAction() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE); - request.add(new MultiGetRequest.Item("index1", "_doc", "1")); - request.add(new MultiGetRequest.Item("index1", "_doc", "2")); + request.add(new MultiGetRequest.Item("index1", "1")); + request.add(new MultiGetRequest.Item("index1", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); transportAction = new TransportMultiGetAction( @@ -257,8 +255,8 @@ public void testTransportMultiGetAction_withMissingRouting() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE); - request.add(new MultiGetRequest.Item("index2", "_doc", "1").routing("1")); - request.add(new MultiGetRequest.Item("index2", "_doc", "2")); + request.add(new MultiGetRequest.Item("index2", "1").routing("1")); + request.add(new MultiGetRequest.Item("index2", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); transportAction = new TransportMultiGetAction( diff --git a/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java b/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java index 16d7b0348b7de..21305957d802b 100644 --- a/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java +++ 
b/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java @@ -137,11 +137,10 @@ public void testAutoGenIdTimestampIsSet() { public void testIndexResponse() { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomIntBetween(0, 1000)); - String type = randomAlphaOfLengthBetween(3, 10); String id = randomAlphaOfLengthBetween(3, 10); long version = randomLong(); boolean created = randomBoolean(); - IndexResponse indexResponse = new IndexResponse(shardId, type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, version, created); + IndexResponse indexResponse = new IndexResponse(shardId, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, version, created); int total = randomIntBetween(1, 10); int successful = randomIntBetween(1, 10); ReplicationResponse.ShardInfo shardInfo = new ReplicationResponse.ShardInfo(total, successful); @@ -151,7 +150,6 @@ public void testIndexResponse() { forcedRefresh = randomBoolean(); indexResponse.setForcedRefresh(forcedRefresh); } - assertEquals(type, indexResponse.getType()); assertEquals(id, indexResponse.getId()); assertEquals(version, indexResponse.getVersion()); assertEquals(shardId, indexResponse.getShardId()); @@ -162,8 +160,6 @@ public void testIndexResponse() { assertEquals( "IndexResponse[index=" + shardId.getIndexName() - + ",type=" - + type + ",id=" + id + ",version=" @@ -220,13 +216,13 @@ public void testToStringSizeLimit() throws UnsupportedEncodingException { String source = "{\"name\":\"value\"}"; request.source(source, XContentType.JSON); - assertEquals("index {[index][_doc][null], source[" + source + "]}", request.toString()); + assertEquals("index {[index][null], source[" + source + "]}", request.toString()); source = "{\"name\":\"" + randomUnicodeOfLength(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING) + "\"}"; request.source(source, XContentType.JSON); int actualBytes = source.getBytes("UTF-8").length; assertEquals( - "index {[index][_doc][null], source[n/a, actual length: 
[" + "index {[index][null], source[n/a, actual length: [" + new ByteSizeValue(actualBytes).toString() + "], max length: " + new ByteSizeValue(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING).toString() diff --git a/server/src/test/java/org/opensearch/action/index/IndexResponseTests.java b/server/src/test/java/org/opensearch/action/index/IndexResponseTests.java index ebe8d0b2aaa1b..25d6a60299848 100644 --- a/server/src/test/java/org/opensearch/action/index/IndexResponseTests.java +++ b/server/src/test/java/org/opensearch/action/index/IndexResponseTests.java @@ -56,21 +56,21 @@ public class IndexResponseTests extends OpenSearchTestCase { public void testToXContent() { { - IndexResponse indexResponse = new IndexResponse(new ShardId("index", "index_uuid", 0), "type", "id", 3, 17, 5, true); + IndexResponse indexResponse = new IndexResponse(new ShardId("index", "index_uuid", 0), "id", 3, 17, 5, true); String output = Strings.toString(indexResponse); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":5,\"result\":\"created\",\"_shards\":null," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":5,\"result\":\"created\",\"_shards\":null," + "\"_seq_no\":3,\"_primary_term\":17}", output ); } { - IndexResponse indexResponse = new IndexResponse(new ShardId("index", "index_uuid", 0), "type", "id", -1, 17, 7, true); + IndexResponse indexResponse = new IndexResponse(new ShardId("index", "index_uuid", 0), "id", -1, 17, 7, true); indexResponse.setForcedRefresh(true); indexResponse.setShardInfo(new ReplicationResponse.ShardInfo(10, 5)); String output = Strings.toString(indexResponse); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":7,\"result\":\"created\"," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":7,\"result\":\"created\"," + "\"forced_refresh\":true,\"_shards\":{\"total\":10,\"successful\":5,\"failed\":0}}", output ); @@ -124,7 +124,6 @@ private void doFromXContentTestWithRandomFields(boolean 
addRandomFields) throws public static void assertDocWriteResponse(DocWriteResponse expected, DocWriteResponse actual) { assertEquals(expected.getIndex(), actual.getIndex()); - assertEquals(expected.getType(), actual.getType()); assertEquals(expected.getId(), actual.getId()); assertEquals(expected.getSeqNo(), actual.getSeqNo()); assertEquals(expected.getResult(), actual.getResult()); @@ -144,7 +143,6 @@ public static Tuple randomIndexResponse() { String index = randomAlphaOfLength(5); String indexUUid = randomAlphaOfLength(5); int shardId = randomIntBetween(0, 5); - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); long seqNo = randomFrom(SequenceNumbers.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), (long) randomIntBetween(0, 10000)); long primaryTerm = seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO ? 0 : randomIntBetween(1, 10000); @@ -154,19 +152,11 @@ public static Tuple randomIndexResponse() { Tuple shardInfos = RandomObjects.randomShardInfo(random()); - IndexResponse actual = new IndexResponse(new ShardId(index, indexUUid, shardId), type, id, seqNo, primaryTerm, version, created); + IndexResponse actual = new IndexResponse(new ShardId(index, indexUUid, shardId), id, seqNo, primaryTerm, version, created); actual.setForcedRefresh(forcedRefresh); actual.setShardInfo(shardInfos.v1()); - IndexResponse expected = new IndexResponse( - new ShardId(index, INDEX_UUID_NA_VALUE, -1), - type, - id, - seqNo, - primaryTerm, - version, - created - ); + IndexResponse expected = new IndexResponse(new ShardId(index, INDEX_UUID_NA_VALUE, -1), id, seqNo, primaryTerm, version, created); expected.setForcedRefresh(forcedRefresh); expected.setShardInfo(shardInfos.v2()); diff --git a/server/src/test/java/org/opensearch/action/ingest/SimulateExecutionServiceTests.java b/server/src/test/java/org/opensearch/action/ingest/SimulateExecutionServiceTests.java index 74a787244ca42..ff7b0dddb33a3 100644 --- 
a/server/src/test/java/org/opensearch/action/ingest/SimulateExecutionServiceTests.java +++ b/server/src/test/java/org/opensearch/action/ingest/SimulateExecutionServiceTests.java @@ -352,7 +352,7 @@ public void testAsyncSimulation() throws Exception { int numDocs = randomIntBetween(1, 64); List documents = new ArrayList<>(numDocs); for (int id = 0; id < numDocs; id++) { - documents.add(new IngestDocument("_index", "_type", Integer.toString(id), null, 0L, VersionType.INTERNAL, new HashMap<>())); + documents.add(new IngestDocument("_index", Integer.toString(id), null, 0L, VersionType.INTERNAL, new HashMap<>())); } Processor processor1 = new AbstractProcessor(null, null) { diff --git a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java index f732178821b4a..c85c0a01de63e 100644 --- a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -86,15 +86,7 @@ public void init() throws IOException { when(ingestService.getProcessorFactories()).thenReturn(registry); } - public void testParseUsingPipelineStoreNoType() throws Exception { - innerTestParseUsingPipelineStore(false); - } - - public void testParseUsingPipelineStoreWithType() throws Exception { - innerTestParseUsingPipelineStore(true); - } - - private void innerTestParseUsingPipelineStore(boolean useExplicitType) throws Exception { + public void testParseUsingPipelineStore(boolean useExplicitType) throws Exception { int numDocs = randomIntBetween(1, 10); Map requestContent = new HashMap<>(); @@ -104,12 +96,8 @@ private void innerTestParseUsingPipelineStore(boolean useExplicitType) throws Ex for (int i = 0; i < numDocs; i++) { Map doc = new HashMap<>(); String index = randomAlphaOfLengthBetween(1, 10); - String type = randomAlphaOfLengthBetween(1, 10); 
String id = randomAlphaOfLengthBetween(1, 10); doc.put(INDEX.getFieldName(), index); - if (useExplicitType) { - doc.put(TYPE.getFieldName(), type); - } doc.put(ID.getFieldName(), id); String fieldName = randomAlphaOfLengthBetween(1, 10); String fieldValue = randomAlphaOfLengthBetween(1, 10); @@ -117,11 +105,6 @@ private void innerTestParseUsingPipelineStore(boolean useExplicitType) throws Ex docs.add(doc); Map expectedDoc = new HashMap<>(); expectedDoc.put(INDEX.getFieldName(), index); - if (useExplicitType) { - expectedDoc.put(TYPE.getFieldName(), type); - } else { - expectedDoc.put(TYPE.getFieldName(), "_doc"); - } expectedDoc.put(ID.getFieldName(), id); expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); expectedDocs.add(expectedDoc); @@ -140,7 +123,6 @@ private void innerTestParseUsingPipelineStore(boolean useExplicitType) throws Ex Map expectedDocument = expectedDocsIterator.next(); Map metadataMap = ingestDocument.extractMetadata(); assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(metadataMap.get(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName()))); assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName()))); assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); } @@ -148,9 +130,6 @@ private void innerTestParseUsingPipelineStore(boolean useExplicitType) throws Ex assertThat(actualRequest.getPipeline().getId(), equalTo(SIMULATED_PIPELINE_ID)); assertThat(actualRequest.getPipeline().getDescription(), nullValue()); assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1)); - if (useExplicitType) { - assertWarnings("[types removal] specifying _type in pipeline simulation requests is deprecated"); - } } public void testParseWithProvidedPipelineNoType() throws Exception { diff --git a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java 
b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java index f8e76b5e85b61..a078966e3aa80 100644 --- a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java @@ -48,7 +48,7 @@ public class ResyncReplicationRequestTests extends OpenSearchTestCase { public void testSerialization() throws IOException { final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8")); - final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1); + final Translog.Index index = new Translog.Index("id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, 100, new Translog.Operation[] { index }); diff --git a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java index 2b6b913b080ec..a544bad4cd9e6 100644 --- a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java +++ b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java @@ -206,7 +206,7 @@ public void testResyncDoesNotBlockOnPrimaryAction() throws Exception { shardId, 42L, 100, - new Translog.Operation[] { new Translog.Index("type", "id", 0, primaryTerm, 0L, bytes, null, -1) } + new Translog.Operation[] { new Translog.Index("id", 0, primaryTerm, 0L, bytes, null, -1) } ); final PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/server/src/test/java/org/opensearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/ExpandSearchPhaseTests.java index b9254777c1784..8be2b9b203da6 100644 --- 
a/server/src/test/java/org/opensearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/ExpandSearchPhaseTests.java @@ -35,7 +35,6 @@ import org.apache.lucene.search.TotalHits; import org.opensearch.action.ActionListener; import org.opensearch.common.document.DocumentField; -import org.opensearch.common.text.Text; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.InnerHitBuilder; import org.opensearch.index.query.QueryBuilder; @@ -66,8 +65,8 @@ public void testCollapseSingleHit() throws IOException { for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { SearchHits hits = new SearchHits( new SearchHit[] { - new SearchHit(innerHitNum, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()), - new SearchHit(innerHitNum + 1, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) }, + new SearchHit(innerHitNum, "ID", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(innerHitNum + 1, "ID", Collections.emptyMap(), Collections.emptyMap()) }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -109,7 +108,6 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL assertThat(groupBuilder.must(), Matchers.contains(QueryBuilders.termQuery("foo", "bar"))); } assertArrayEquals(mockSearchPhaseContext.getRequest().indices(), searchRequest.indices()); - assertArrayEquals(mockSearchPhaseContext.getRequest().types(), searchRequest.types()); List mSearchResponses = new ArrayList<>(numInnerHits); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { @@ -137,7 +135,6 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL new SearchHit( 1, "ID", - new Text("type"), Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))), Collections.emptyMap() ) }, @@ -166,8 +163,8 @@ public void 
testFailOneItemFailsEntirePhase() throws IOException { SearchHits collapsedHits = new SearchHits( new SearchHit[] { - new SearchHit(2, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()), - new SearchHit(3, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) }, + new SearchHit(2, "ID", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(3, "ID", Collections.emptyMap(), Collections.emptyMap()) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -210,14 +207,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL new SearchHit( 1, "ID", - new Text("type"), Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))), Collections.emptyMap() ), new SearchHit( 2, "ID2", - new Text("type"), Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))), Collections.emptyMap() ) }, @@ -247,14 +242,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL new SearchHit( 1, "ID", - new Text("type"), Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))), Collections.emptyMap() ), new SearchHit( 2, "ID2", - new Text("type"), Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))), Collections.emptyMap() ) }, diff --git a/server/src/test/java/org/opensearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/opensearch/action/search/MultiSearchRequestTests.java index 3b09e9861bd1f..de80ea22071e8 100644 --- a/server/src/test/java/org/opensearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/opensearch/action/search/MultiSearchRequestTests.java @@ -88,13 +88,11 @@ public void testSimpleAdd() throws Exception { request.requests().get(0).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, 
SearchRequest.DEFAULT_INDICES_OPTIONS)) ); - assertThat(request.requests().get(0).types().length, equalTo(0)); assertThat(request.requests().get(1).indices()[0], equalTo("test")); assertThat( request.requests().get(1).indicesOptions(), equalTo(IndicesOptions.fromOptions(false, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS)) ); - assertThat(request.requests().get(1).types()[0], equalTo("type1")); assertThat(request.requests().get(2).indices()[0], equalTo("test")); assertThat( request.requests().get(2).indicesOptions(), @@ -112,12 +110,9 @@ public void testSimpleAdd() throws Exception { ); assertThat(request.requests().get(5).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(5).types().length, equalTo(0)); assertThat(request.requests().get(6).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(6).types().length, equalTo(0)); assertThat(request.requests().get(6).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH)); assertThat(request.requests().get(7).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(7).types().length, equalTo(0)); } public void testFailWithUnknownKey() { @@ -148,7 +143,6 @@ public void testSimpleAddWithCarriageReturn() throws Exception { request.requests().get(0).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS)) ); - assertThat(request.requests().get(0).types().length, equalTo(0)); } public void testCancelAfterIntervalAtParentAndFewChildRequest() throws Exception { @@ -197,23 +191,17 @@ public void testDefaultIndicesOptions() throws IOException { request.requests().get(0).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS)) ); - assertThat(request.requests().get(0).types().length, equalTo(0)); } public void testSimpleAdd2() throws Exception { MultiSearchRequest request = 
parseMultiSearchRequestFromFile("/org/opensearch/action/search/simple-msearch2.json"); assertThat(request.requests().size(), equalTo(5)); assertThat(request.requests().get(0).indices()[0], equalTo("test")); - assertThat(request.requests().get(0).types().length, equalTo(0)); assertThat(request.requests().get(1).indices()[0], equalTo("test")); - assertThat(request.requests().get(1).types()[0], equalTo("type1")); assertThat(request.requests().get(2).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(2).types().length, equalTo(0)); assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(3).types().length, equalTo(0)); assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH)); assertThat(request.requests().get(4).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(4).types().length, equalTo(0)); } public void testSimpleAdd3() throws Exception { @@ -223,13 +211,9 @@ public void testSimpleAdd3() throws Exception { assertThat(request.requests().get(0).indices()[1], equalTo("test1")); assertThat(request.requests().get(1).indices()[0], equalTo("test2")); assertThat(request.requests().get(1).indices()[1], equalTo("test3")); - assertThat(request.requests().get(1).types()[0], equalTo("type1")); assertThat(request.requests().get(2).indices()[0], equalTo("test4")); assertThat(request.requests().get(2).indices()[1], equalTo("test1")); - assertThat(request.requests().get(2).types()[0], equalTo("type2")); - assertThat(request.requests().get(2).types()[1], equalTo("type1")); assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY)); - assertThat(request.requests().get(3).types().length, equalTo(0)); assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH)); } @@ -242,13 +226,10 @@ public void testSimpleAdd4() throws Exception { assertThat(request.requests().get(0).preference(), nullValue()); 
assertThat(request.requests().get(1).indices()[0], equalTo("test2")); assertThat(request.requests().get(1).indices()[1], equalTo("test3")); - assertThat(request.requests().get(1).types()[0], equalTo("type1")); assertThat(request.requests().get(1).requestCache(), nullValue()); assertThat(request.requests().get(1).preference(), equalTo("_local")); assertThat(request.requests().get(2).indices()[0], equalTo("test4")); assertThat(request.requests().get(2).indices()[1], equalTo("test1")); - assertThat(request.requests().get(2).types()[0], equalTo("type2")); - assertThat(request.requests().get(2).types()[1], equalTo("type1")); assertThat(request.requests().get(2).routing(), equalTo("123")); } @@ -419,7 +400,6 @@ public void testMultiLineSerialization() throws IOException { null, null, null, - null, xContentRegistry(), true, deprecationLogger diff --git a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java index 3774cafe12c00..2a6d6ee7e45bb 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java @@ -424,7 +424,7 @@ private static AtomicArray generateFetchResults(int nShards, List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { if (scoreDoc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap())); + searchHits.add(new SearchHit(scoreDoc.doc, "", Collections.emptyMap(), Collections.emptyMap())); if (scoreDoc.score > maxScore) { maxScore = scoreDoc.score; } @@ -435,7 +435,7 @@ private static AtomicArray generateFetchResults(int nShards, for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) { ScoreDoc doc = option.getDoc(); if (doc.shardIndex == shardIndex) { - searchHits.add(new 
SearchHit(doc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap())); + searchHits.add(new SearchHit(doc.doc, "", Collections.emptyMap(), Collections.emptyMap())); if (doc.score > maxScore) { maxScore = doc.score; } diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestTests.java index 8fe4e89a58f38..19544af63944c 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchRequestTests.java @@ -127,7 +127,6 @@ public void testIllegalArguments() { SearchRequest searchRequest = new SearchRequest(); assertNotNull(searchRequest.indices()); assertNotNull(searchRequest.indicesOptions()); - assertNotNull(searchRequest.types()); assertNotNull(searchRequest.searchType()); NullPointerException e = expectThrows(NullPointerException.class, () -> searchRequest.indices((String[]) null)); @@ -138,11 +137,6 @@ public void testIllegalArguments() { e = expectThrows(NullPointerException.class, () -> searchRequest.indicesOptions(null)); assertEquals("indicesOptions must not be null", e.getMessage()); - e = expectThrows(NullPointerException.class, () -> searchRequest.types((String[]) null)); - assertEquals("types must not be null", e.getMessage()); - e = expectThrows(NullPointerException.class, () -> searchRequest.types((String) null)); - assertEquals("type must not be null", e.getMessage()); - e = expectThrows(NullPointerException.class, () -> searchRequest.searchType((SearchType) null)); assertEquals("searchType must not be null", e.getMessage()); @@ -242,7 +236,6 @@ private SearchRequest mutate(SearchRequest searchRequest) { ) ) ); - mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAlphaOfLength(10) }))); mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAlphaOfLengthBetween(3, 10)))); 
mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAlphaOfLengthBetween(3, 10)))); mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), OpenSearchTestCase::randomBoolean)))); @@ -273,13 +266,13 @@ private SearchRequest mutate(SearchRequest searchRequest) { } public void testDescriptionForDefault() { - assertThat(toDescription(new SearchRequest()), equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], source[]")); + assertThat(toDescription(new SearchRequest()), equalTo("indices[], search_type[QUERY_THEN_FETCH], source[]")); } public void testDescriptionIncludesScroll() { assertThat( toDescription(new SearchRequest().scroll(TimeValue.timeValueMinutes(5))), - equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]") + equalTo("indices[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]") ); } diff --git a/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java b/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java index ae23d0bab9885..4a68503b6c3f8 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java @@ -39,7 +39,6 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; @@ -241,7 +240,7 @@ public void testFromXContentWithFailures() throws IOException { } public void testToXContent() { - SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()); + SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()); 
hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; { @@ -279,7 +278,7 @@ public void testToXContent() { { expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},"); expectedString.append("\"max_score\":1.5,"); - expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}"); + expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}"); } } expectedString.append("}"); @@ -326,7 +325,7 @@ public void testToXContent() { { expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},"); expectedString.append("\"max_score\":1.5,"); - expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}"); + expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}"); } } expectedString.append("}"); diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java index 29c8204af02b6..b034b335bd9a3 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java +++ b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java @@ -170,7 +170,7 @@ public void setUp() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder(shardId.getIndexName()) .settings(indexSettings) .primaryTerm(shardId.id(), primary.getOperationPrimaryTerm()) - .putMapping("_doc", "{ \"properties\": { \"value\": { \"type\": \"short\"}}}") + .putMapping("{ \"properties\": { \"value\": { \"type\": \"short\"}}}") .build(); state.metadata(Metadata.builder().put(indexMetadata, false).generateClusterUuidIfNeeded()); diff --git a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java 
index bd586294f5d49..78214334928a4 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -431,7 +431,7 @@ protected void validateResponse(TermVectorsResponse esResponse, Fields luceneFie } protected TermVectorsRequestBuilder getRequestForConfig(TestConfig config) { - return client().prepareTermVectors(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.type, config.doc.id) + return client().prepareTermVectors(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.id) .setPayloads(config.requestPayloads) .setOffsets(config.requestOffsets) .setPositions(config.requestPositions) diff --git a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java index 699dad124db43..b7cdb3301384a 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java @@ -188,12 +188,13 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio .build(); createIndex("test", setting, "type1", mapping); - client().prepareIndex("test", "type1", Integer.toString(1)) + client().prepareIndex("test") + .setId(Integer.toString(1)) .setSource(jsonBuilder().startObject().field("field", queryString).endObject()) .execute() .actionGet(); client().admin().indices().prepareRefresh().get(); - TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(1)) + TermVectorsRequestBuilder resp = client().prepareTermVectors("test", Integer.toString(1)) .setPayloads(true) .setOffsets(true) .setPositions(true) diff --git a/server/src/test/java/org/opensearch/action/termvectors/TermVectorsUnitTests.java 
b/server/src/test/java/org/opensearch/action/termvectors/TermVectorsUnitTests.java index b204535b749eb..089dfcaf65517 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/TermVectorsUnitTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/TermVectorsUnitTests.java @@ -49,6 +49,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; +import org.opensearch.LegacyESVersion; import org.opensearch.action.termvectors.TermVectorsRequest.Flag; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; @@ -57,7 +58,9 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.index.shard.ShardId; import org.opensearch.rest.action.document.RestTermVectorsAction; +import org.opensearch.tasks.TaskId; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.StreamsUtils; import org.hamcrest.Matchers; @@ -74,7 +77,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase { public void testStreamResponse() throws Exception { - TermVectorsResponse outResponse = new TermVectorsResponse("a", "b", "c"); + TermVectorsResponse outResponse = new TermVectorsResponse("a", "c"); outResponse.setExists(true); writeStandardTermVector(outResponse); @@ -91,7 +94,7 @@ public void testStreamResponse() throws Exception { // see if correct checkIfStandardTermVector(inResponse); - outResponse = new TermVectorsResponse("a", "b", "c"); + outResponse = new TermVectorsResponse("a", "c"); writeEmptyTermVector(outResponse); // write outBuffer = new ByteArrayOutputStream(); @@ -185,7 +188,7 @@ public void testRestRequestParsing() throws Exception { " {\"fields\" : [\"a\", \"b\",\"c\"], \"offsets\":false, \"positions\":false, \"payloads\":true}" ); - TermVectorsRequest tvr = new TermVectorsRequest(null, null, 
null); + TermVectorsRequest tvr = new TermVectorsRequest(null, null); XContentParser parser = createParser(JsonXContent.jsonXContent, inputBytes); TermVectorsRequest.parseRequest(tvr, parser); @@ -206,7 +209,7 @@ public void testRestRequestParsing() throws Exception { RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields); inputBytes = new BytesArray(" {\"offsets\":false, \"positions\":false, \"payloads\":true}"); - tvr = new TermVectorsRequest(null, null, null); + tvr = new TermVectorsRequest(null, null); parser = createParser(JsonXContent.jsonXContent, inputBytes); TermVectorsRequest.parseRequest(tvr, parser); additionalFields = ""; @@ -222,7 +225,7 @@ public void testRequestParsingThrowsException() { BytesReference inputBytes = new BytesArray( " {\"fields\" : \"a, b,c \", \"offsets\":false, \"positions\":false, \"payloads\":true, \"meaningless_term\":2}" ); - TermVectorsRequest tvr = new TermVectorsRequest(null, null, null); + TermVectorsRequest tvr = new TermVectorsRequest(null, null); boolean threwException = false; try { XContentParser parser = createParser(JsonXContent.jsonXContent, inputBytes); @@ -236,7 +239,7 @@ public void testRequestParsingThrowsException() { public void testStreamRequest() throws IOException { for (int i = 0; i < 10; i++) { - TermVectorsRequest request = new TermVectorsRequest("index", "type", "id"); + TermVectorsRequest request = new TermVectorsRequest("index", "id"); request.offsets(random().nextBoolean()); request.fieldStatistics(random().nextBoolean()); request.payloads(random().nextBoolean()); @@ -252,9 +255,55 @@ public void testStreamRequest() throws IOException { request.writeTo(out); // read - ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); - InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer); - TermVectorsRequest req2 = new TermVectorsRequest(esBuffer); + ByteArrayInputStream opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + 
InputStreamStreamInput opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer); + TermVectorsRequest req2 = new TermVectorsRequest(opensearchBuffer); + + assertThat(request.offsets(), equalTo(req2.offsets())); + assertThat(request.fieldStatistics(), equalTo(req2.fieldStatistics())); + assertThat(request.payloads(), equalTo(req2.payloads())); + assertThat(request.positions(), equalTo(req2.positions())); + assertThat(request.termStatistics(), equalTo(req2.termStatistics())); + assertThat(request.preference(), equalTo(pref)); + assertThat(request.routing(), equalTo(null)); + assertEquals(new BytesArray("{}"), request.doc()); + assertEquals(XContentType.JSON, request.xContentType()); + } + } + + public void testStreamRequestLegacyVersion() throws IOException { + for (int i = 0; i < 10; i++) { + TermVectorsRequest request = new TermVectorsRequest("index", "id"); + request.offsets(random().nextBoolean()); + request.fieldStatistics(random().nextBoolean()); + request.payloads(random().nextBoolean()); + request.positions(random().nextBoolean()); + request.termStatistics(random().nextBoolean()); + String pref = random().nextBoolean() ? 
"somePreference" : null; + request.preference(pref); + request.doc(new BytesArray("{}"), randomBoolean(), XContentType.JSON); + + // write using older version which contains types + ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); + OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); + out.setVersion(LegacyESVersion.V_7_2_0); + request.writeTo(out); + + // First check the type on the stream was written as "_doc" by manually parsing the stream until the type + ByteArrayInputStream opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + InputStreamStreamInput opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer); + TaskId.readFromStream(opensearchBuffer); + if (opensearchBuffer.readBoolean()) { + new ShardId(opensearchBuffer); + } + opensearchBuffer.readOptionalString(); + assertThat(opensearchBuffer.readString(), equalTo("_doc")); + + // now read the stream as normal to check it is parsed correct if received from an older node + opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer); + opensearchBuffer.setVersion(LegacyESVersion.V_7_2_0); + TermVectorsRequest req2 = new TermVectorsRequest(opensearchBuffer); assertThat(request.offsets(), equalTo(req2.offsets())); assertThat(request.fieldStatistics(), equalTo(req2.fieldStatistics())); @@ -281,7 +330,6 @@ public void testMultiParser() throws Exception { request.add(new TermVectorsRequest(), data); checkParsedParameters(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); } void checkParsedParameters(MultiTermVectorsRequest request) { @@ -294,7 +342,6 @@ void checkParsedParameters(MultiTermVectorsRequest request) { fields.add("c"); for (TermVectorsRequest singleRequest : request.requests) { assertThat(singleRequest.index(), equalTo("testidx")); - assertThat(singleRequest.type(), equalTo("test")); assertThat(singleRequest.payloads(), equalTo(false)); 
assertThat(singleRequest.positions(), equalTo(false)); assertThat(singleRequest.offsets(), equalTo(false)); @@ -313,14 +360,12 @@ public void testMultiParserFilter() throws Exception { request.add(new TermVectorsRequest(), data); checkParsedFilterParameters(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); } void checkParsedFilterParameters(MultiTermVectorsRequest multiRequest) { Set ids = new HashSet<>(Arrays.asList("1", "2")); for (TermVectorsRequest request : multiRequest.requests) { assertThat(request.index(), equalTo("testidx")); - assertThat(request.type(), equalTo("test")); assertTrue(ids.remove(request.id())); assertNotNull(request.filterSettings()); assertThat(request.filterSettings().maxNumTerms, equalTo(20)); diff --git a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java index b6145979e8949..b62050a1b8050 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -128,7 +128,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -154,7 +153,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -230,8 +228,8 @@ public void testTransportMultiGetAction() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE); - request.add(new TermVectorsRequest("index1", 
"_doc", "1")); - request.add(new TermVectorsRequest("index2", "_doc", "2")); + request.add(new TermVectorsRequest("index1", "1")); + request.add(new TermVectorsRequest("index2", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); transportAction = new TransportMultiTermVectorsAction( @@ -262,8 +260,8 @@ public void testTransportMultiGetAction_withMissingRouting() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE); - request.add(new TermVectorsRequest("index2", "_doc", "1").routing("1")); - request.add(new TermVectorsRequest("index2", "_doc", "2")); + request.add(new TermVectorsRequest("index2", "1").routing("1")); + request.add(new TermVectorsRequest("index2", "2")); final AtomicBoolean shardActionInvoked = new AtomicBoolean(false); transportAction = new TransportMultiTermVectorsAction( diff --git a/server/src/test/java/org/opensearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/opensearch/action/update/UpdateRequestTests.java index d339a37b51188..380b0628147de 100644 --- a/server/src/test/java/org/opensearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/opensearch/action/update/UpdateRequestTests.java @@ -142,7 +142,7 @@ public void setUp() throws Exception { @SuppressWarnings("unchecked") public void testFromXContent() throws Exception { - UpdateRequest request = new UpdateRequest("test", "type", "1"); + UpdateRequest request = new UpdateRequest("test", "1"); // simple script request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject().field("script", "script1").endObject())); Script script = request.script(); @@ -168,7 +168,7 @@ public void testFromXContent() throws Exception { assertThat(params, equalTo(emptyMap())); // script with params - request = new UpdateRequest("test", "type", "1"); + request = new 
UpdateRequest("test", "1"); request.fromXContent( createParser( XContentFactory.jsonBuilder() @@ -192,7 +192,7 @@ public void testFromXContent() throws Exception { assertThat(params.size(), equalTo(1)); assertThat(params.get("param1").toString(), equalTo("value1")); - request = new UpdateRequest("test", "type", "1"); + request = new UpdateRequest("test", "1"); request.fromXContent( createParser( XContentFactory.jsonBuilder() @@ -217,7 +217,7 @@ public void testFromXContent() throws Exception { assertThat(params.get("param1").toString(), equalTo("value1")); // script with params and upsert - request = new UpdateRequest("test", "type", "1"); + request = new UpdateRequest("test", "1"); request.fromXContent( createParser( XContentFactory.jsonBuilder() @@ -254,7 +254,7 @@ public void testFromXContent() throws Exception { assertThat(upsertDoc.get("field1").toString(), equalTo("value1")); assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); - request = new UpdateRequest("test", "type", "1"); + request = new UpdateRequest("test", "1"); request.fromXContent( createParser( XContentFactory.jsonBuilder() @@ -288,7 +288,7 @@ public void testFromXContent() throws Exception { assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); // script with doc - request = new UpdateRequest("test", "type", "1"); + request = new UpdateRequest("test", "1"); request.fromXContent( createParser( XContentFactory.jsonBuilder() @@ -308,13 +308,13 @@ public void testFromXContent() throws Exception { } public void testUnknownFieldParsing() throws Exception { - UpdateRequest request = new UpdateRequest("test", "type", "1"); + UpdateRequest request = new UpdateRequest("test", "1"); XContentParser contentParser = createParser(XContentFactory.jsonBuilder().startObject().field("unknown_field", "test").endObject()); XContentParseException ex = expectThrows(XContentParseException.class, () -> request.fromXContent(contentParser)); 
assertEquals("[1:2] [UpdateRequest] unknown field [unknown_field]", ex.getMessage()); - UpdateRequest request2 = new UpdateRequest("test", "type", "1"); + UpdateRequest request2 = new UpdateRequest("test", "1"); XContentParser unknownObject = createParser( XContentFactory.jsonBuilder() .startObject() @@ -329,7 +329,7 @@ public void testUnknownFieldParsing() throws Exception { } public void testFetchSourceParsing() throws Exception { - UpdateRequest request = new UpdateRequest("test", "type1", "1"); + UpdateRequest request = new UpdateRequest("test", "1"); request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject().field("_source", true).endObject())); assertThat(request.fetchSource(), notNullValue()); assertThat(request.fetchSource().includes().length, equalTo(0)); @@ -370,17 +370,15 @@ public void testFetchSourceParsing() throws Exception { public void testNowInScript() throws IOException { // We just upsert one document with now() using a script - IndexRequest indexRequest = new IndexRequest("test", "type1", "2").source( - jsonBuilder().startObject().field("foo", "bar").endObject() - ); + IndexRequest indexRequest = new IndexRequest("test").id("2").source(jsonBuilder().startObject().field("foo", "bar").endObject()); { - UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2").upsert(indexRequest) + UpdateRequest updateRequest = new UpdateRequest("test", "2").upsert(indexRequest) .script(mockInlineScript("ctx._source.update_timestamp = ctx._now")) .scriptedUpsert(true); long nowInMillis = randomNonNegativeLong(); // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "2", UNASSIGNED_SEQ_NO, 0, 0, false, null, null, null); + GetResult getResult = new GetResult("test", "2", UNASSIGNED_SEQ_NO, 0, 0, false, null, null, null); UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis); Writeable action = 
result.action(); assertThat(action, instanceOf(IndexRequest.class)); @@ -388,11 +386,11 @@ public void testNowInScript() throws IOException { assertEquals(nowInMillis, indexAction.sourceAsMap().get("update_timestamp")); } { - UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2").upsert(indexRequest) + UpdateRequest updateRequest = new UpdateRequest("test", "2").upsert(indexRequest) .script(mockInlineScript("ctx._timestamp = ctx._now")) .scriptedUpsert(true); // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "2", 0, 1, 0, true, new BytesArray("{}"), null, null); + GetResult getResult = new GetResult("test", "2", 0, 1, 0, true, new BytesArray("{}"), null, null); UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L); Writeable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); @@ -400,15 +398,14 @@ public void testNowInScript() throws IOException { } public void testIndexTimeout() { - final GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null, null); - final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1").script(mockInlineScript("return")) - .timeout(randomTimeValue()); + final GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null, null); + final UpdateRequest updateRequest = new UpdateRequest("test", "1").script(mockInlineScript("return")).timeout(randomTimeValue()); runTimeoutTest(getResult, updateRequest); } public void testDeleteTimeout() { - final GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null, null); - final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1").script(mockInlineScript("ctx.op = delete")) + final GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new 
BytesArray("{\"f\":\"v\"}"), null, null); + final UpdateRequest updateRequest = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = delete")) .timeout(randomTimeValue()); runTimeoutTest(getResult, updateRequest); } @@ -416,15 +413,15 @@ public void testDeleteTimeout() { public void testUpsertTimeout() throws IOException { final boolean exists = randomBoolean(); final BytesReference source = exists ? new BytesArray("{\"f\":\"v\"}") : null; - final GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null, null); + final GetResult getResult = new GetResult("test", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null, null); final XContentBuilder sourceBuilder = jsonBuilder(); sourceBuilder.startObject(); { sourceBuilder.field("f", "v"); } sourceBuilder.endObject(); - final IndexRequest upsert = new IndexRequest("test", "type", "1").source(sourceBuilder); - final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1").upsert(upsert) + final IndexRequest upsert = new IndexRequest("test").id("1").source(sourceBuilder); + final UpdateRequest updateRequest = new UpdateRequest("test", "1").upsert(upsert) .script(mockInlineScript("return")) .timeout(randomTimeValue()); runTimeoutTest(getResult, updateRequest); @@ -514,11 +511,11 @@ public void testToAndFromXContent() throws IOException { } public void testToValidateUpsertRequestAndCAS() { - UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); + UpdateRequest updateRequest = new UpdateRequest("index", "id"); updateRequest.setIfSeqNo(1L); updateRequest.setIfPrimaryTerm(1L); updateRequest.doc("{}", XContentType.JSON); - updateRequest.upsert(new IndexRequest("index", "type", "id")); + updateRequest.upsert(new IndexRequest("index").id("id")); assertThat( updateRequest.validate().validationErrors(), contains("upsert requests don't support `if_seq_no` and `if_primary_term`") @@ -526,15 +523,15 @@ public void testToValidateUpsertRequestAndCAS() 
{ } public void testToValidateUpsertRequestWithVersion() { - UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); + UpdateRequest updateRequest = new UpdateRequest("index", "id"); updateRequest.doc("{}", XContentType.JSON); - updateRequest.upsert(new IndexRequest("index", "type", "1").version(1L)); + updateRequest.upsert(new IndexRequest("index").id("1").version(1L)); assertThat(updateRequest.validate().validationErrors(), contains("can't provide version in upsert request")); } public void testValidate() { { - UpdateRequest request = new UpdateRequest("index", "type", "id"); + UpdateRequest request = new UpdateRequest("index", "id"); request.doc("{}", XContentType.JSON); ActionRequestValidationException validate = request.validate(); @@ -542,27 +539,18 @@ public void testValidate() { } { // Null types are defaulted to "_doc" - UpdateRequest request = new UpdateRequest("index", null, randomBoolean() ? "" : null); + UpdateRequest request = new UpdateRequest("index", null); request.doc("{}", XContentType.JSON); ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); assertThat(validate.validationErrors(), hasItems("id is missing")); } - { - // Non-null types are accepted but fail validation - UpdateRequest request = new UpdateRequest("index", "", randomBoolean() ? 
"" : null); - request.doc("{}", XContentType.JSON); - ActionRequestValidationException validate = request.validate(); - - assertThat(validate, not(nullValue())); - assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); - } } public void testRoutingExtraction() throws Exception { - GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null, null); - IndexRequest indexRequest = new IndexRequest("test", "type", "1"); + GetResult getResult = new GetResult("test", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null, null); + IndexRequest indexRequest = new IndexRequest("test").id("1"); // There is no routing and parent because the document doesn't exist assertNull(UpdateHelper.calculateRouting(getResult, null)); @@ -571,7 +559,7 @@ public void testRoutingExtraction() throws Exception { assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); // Doc exists but has no source or fields - getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, null, null); + getResult = new GetResult("test", "1", 0, 1, 0, true, null, null, null); // There is no routing and parent on either request assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); @@ -580,7 +568,7 @@ public void testRoutingExtraction() throws Exception { fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1"))); // Doc exists and has the parent and routing fields - getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, fields, null); + getResult = new GetResult("test", "1", 0, 1, 0, true, null, fields, null); // Use the get result parent and routing assertThat(UpdateHelper.calculateRouting(getResult, indexRequest), equalTo("routing1")); @@ -588,9 +576,9 @@ public void testRoutingExtraction() throws Exception { public void testNoopDetection() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, 1, 
0, true, new BytesArray("{\"body\": \"foo\"}"), null, null); + GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"foo\"}"), null, null); - UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent( + UpdateRequest request = new UpdateRequest("test", "1").fromXContent( createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"foo\"}}")) ); @@ -606,7 +594,7 @@ public void testNoopDetection() throws Exception { assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("foo")); // Change the request to be a different doc - request = new UpdateRequest("test", "type1", "1").fromXContent( + request = new UpdateRequest("test", "1").fromXContent( createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}")) ); result = updateHelper.prepareUpdateIndexRequest(shardId, request, getResult, true); @@ -619,9 +607,9 @@ public void testNoopDetection() throws Exception { public void testUpdateScript() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"bar\"}"), null, null); + GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"bar\"}"), null, null); - UpdateRequest request = new UpdateRequest("test", "type1", "1").script(mockInlineScript("ctx._source.body = \"foo\"")); + UpdateRequest request = new UpdateRequest("test", "1").script(mockInlineScript("ctx._source.body = \"foo\"")); UpdateHelper.Result result = updateHelper.prepareUpdateScriptRequest( shardId, @@ -635,7 +623,7 @@ public void testUpdateScript() throws Exception { assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("foo")); // Now where the script changes the op to "delete" - request = new UpdateRequest("test", "type1", "1").script(mockInlineScript("ctx.op = delete")); + request = new UpdateRequest("test", 
"1").script(mockInlineScript("ctx.op = delete")); result = updateHelper.prepareUpdateScriptRequest(shardId, request, getResult, OpenSearchTestCase::randomNonNegativeLong); @@ -645,9 +633,9 @@ public void testUpdateScript() throws Exception { // We treat everything else as a No-op boolean goodNoop = randomBoolean(); if (goodNoop) { - request = new UpdateRequest("test", "type1", "1").script(mockInlineScript("ctx.op = none")); + request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = none")); } else { - request = new UpdateRequest("test", "type1", "1").script(mockInlineScript("ctx.op = bad")); + request = new UpdateRequest("test", "1").script(mockInlineScript("ctx.op = bad")); } result = updateHelper.prepareUpdateScriptRequest(shardId, request, getResult, OpenSearchTestCase::randomNonNegativeLong); @@ -657,23 +645,23 @@ public void testUpdateScript() throws Exception { } public void testToString() throws IOException { - UpdateRequest request = new UpdateRequest("test", "type1", "1").script(mockInlineScript("ctx._source.body = \"foo\"")); + UpdateRequest request = new UpdateRequest("test", "1").script(mockInlineScript("ctx._source.body = \"foo\"")); assertThat( request.toString(), equalTo( - "update {[test][type1][1], doc_as_upsert[false], " + "update {[test][1], doc_as_upsert[false], " + "script[Script{type=inline, lang='mock', idOrCode='ctx._source.body = \"foo\"', options={}, params={}}], " + "scripted_upsert[false], detect_noop[true]}" ) ); - request = new UpdateRequest("test", "type1", "1").fromXContent( + request = new UpdateRequest("test", "1").fromXContent( createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}")) ); assertThat( request.toString(), equalTo( - "update {[test][type1][1], doc_as_upsert[false], " - + "doc[index {[null][_doc][null], source[{\"body\":\"bar\"}]}], scripted_upsert[false], detect_noop[true]}" + "update {[test][1], doc_as_upsert[false], " + + "doc[index {[null][null], 
source[{\"body\":\"bar\"}]}], scripted_upsert[false], detect_noop[true]}" ) ); } diff --git a/server/src/test/java/org/opensearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/opensearch/action/update/UpdateResponseTests.java index 8ba87114f542c..ba0abd6c8e349 100644 --- a/server/src/test/java/org/opensearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/opensearch/action/update/UpdateResponseTests.java @@ -68,10 +68,10 @@ public class UpdateResponseTests extends OpenSearchTestCase { public void testToXContent() throws IOException { { - UpdateResponse updateResponse = new UpdateResponse(new ShardId("index", "index_uuid", 0), "type", "id", -2, 0, 0, NOT_FOUND); + UpdateResponse updateResponse = new UpdateResponse(new ShardId("index", "index_uuid", 0), "id", -2, 0, 0, NOT_FOUND); String output = Strings.toString(updateResponse); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":0,\"result\":\"not_found\"," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":0,\"result\":\"not_found\"," + "\"_shards\":{\"total\":0,\"successful\":0,\"failed\":0}}", output ); @@ -80,7 +80,6 @@ public void testToXContent() throws IOException { UpdateResponse updateResponse = new UpdateResponse( new ReplicationResponse.ShardInfo(10, 6), new ShardId("index", "index_uuid", 1), - "type", "id", 3, 17, @@ -89,7 +88,7 @@ public void testToXContent() throws IOException { ); String output = Strings.toString(updateResponse); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"result\":\"deleted\"," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":1,\"result\":\"deleted\"," + "\"_shards\":{\"total\":10,\"successful\":6,\"failed\":0},\"_seq_no\":3,\"_primary_term\":17}", output ); @@ -103,18 +102,17 @@ public void testToXContent() throws IOException { UpdateResponse updateResponse = new UpdateResponse( new ReplicationResponse.ShardInfo(3, 2), new ShardId("books", "books_uuid", 
2), - "book", "1", 7, 17, 2, UPDATED ); - updateResponse.setGetResult(new GetResult("books", "book", "1", 0, 1, 2, true, source, fields, null)); + updateResponse.setGetResult(new GetResult("books", "1", 0, 1, 2, true, source, fields, null)); String output = Strings.toString(updateResponse); assertEquals( - "{\"_index\":\"books\",\"_type\":\"book\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," + "{\"_index\":\"books\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," + "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{" + "\"_seq_no\":0,\"_primary_term\":1,\"found\":true," + "\"_source\":{\"title\":\"Book title\",\"isbn\":\"ABC-123\"},\"fields\":{\"isbn\":[\"ABC-123\"],\"title\":[\"Book " @@ -192,7 +190,6 @@ public static Tuple randomUpdateResponse(XConten GetResult expectedGetResult = getResults.v2(); String index = actualGetResult.getIndex(); - String type = actualGetResult.getType(); String id = actualGetResult.getId(); long version = actualGetResult.getVersion(); DocWriteResponse.Result result = actualGetResult.isExists() ? 
DocWriteResponse.Result.UPDATED : DocWriteResponse.Result.NOT_FOUND; @@ -211,11 +208,11 @@ public static Tuple randomUpdateResponse(XConten if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { Tuple shardInfos = RandomObjects.randomShardInfo(random()); - actual = new UpdateResponse(shardInfos.v1(), actualShardId, type, id, seqNo, primaryTerm, version, result); - expected = new UpdateResponse(shardInfos.v2(), expectedShardId, type, id, seqNo, primaryTerm, version, result); + actual = new UpdateResponse(shardInfos.v1(), actualShardId, id, seqNo, primaryTerm, version, result); + expected = new UpdateResponse(shardInfos.v2(), expectedShardId, id, seqNo, primaryTerm, version, result); } else { - actual = new UpdateResponse(actualShardId, type, id, seqNo, primaryTerm, version, result); - expected = new UpdateResponse(expectedShardId, type, id, seqNo, primaryTerm, version, result); + actual = new UpdateResponse(actualShardId, id, seqNo, primaryTerm, version, result); + expected = new UpdateResponse(expectedShardId, id, seqNo, primaryTerm, version, result); } if (actualGetResult.isExists()) { diff --git a/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java index d37ff7d480bd8..92a88aa7940ee 100644 --- a/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/opensearch/client/AbstractClientHeadersTestCase.java @@ -121,14 +121,15 @@ public void testActions() { // validation in the settings??? 
- ugly and conceptually wrong) // choosing arbitrary top level actions to test - client.prepareGet("idx", "type", "id").execute(new AssertingActionListener<>(GetAction.NAME, client.threadPool())); + client.prepareGet("idx", "id").execute(new AssertingActionListener<>(GetAction.NAME, client.threadPool())); client.prepareSearch().execute(new AssertingActionListener<>(SearchAction.NAME, client.threadPool())); - client.prepareDelete("idx", "type", "id").execute(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); + client.prepareDelete("idx", "id").execute(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); client.admin() .cluster() .prepareDeleteStoredScript("id") .execute(new AssertingActionListener<>(DeleteStoredScriptAction.NAME, client.threadPool())); - client.prepareIndex("idx", "type", "id") + client.prepareIndex("idx") + .setId("id") .setSource("source", XContentType.JSON) .execute(new AssertingActionListener<>(IndexAction.NAME, client.threadPool())); @@ -156,7 +157,7 @@ public void testOverrideHeader() throws Exception { expected.put("key1", key1Val); expected.put("key2", "val 2"); client.threadPool().getThreadContext().putHeader("key1", key1Val); - client.prepareGet("idx", "type", "id").execute(new AssertingActionListener<>(GetAction.NAME, expected, client.threadPool())); + client.prepareGet("idx", "id").execute(new AssertingActionListener<>(GetAction.NAME, expected, client.threadPool())); client.admin() .cluster() diff --git a/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java b/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java index 8869c2857aa4f..004b784311b54 100644 --- a/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java +++ b/server/src/test/java/org/opensearch/cluster/ClusterModuleTests.java @@ -254,29 +254,29 @@ public void testAllocationDeciderOrder() { } public void testPre63CustomsFiltering() { - final String whiteListedClusterCustom = 
randomFrom(ClusterModule.PRE_6_3_CLUSTER_CUSTOMS_WHITE_LIST); - final String whiteListedMetadataCustom = randomFrom(ClusterModule.PRE_6_3_METADATA_CUSTOMS_WHITE_LIST); + final String allowListedClusterCustom = randomFrom(ClusterModule.PRE_6_3_CLUSTER_CUSTOMS_WHITE_LIST); + final String allowListedMetadataCustom = randomFrom(ClusterModule.PRE_6_3_METADATA_CUSTOMS_WHITE_LIST); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .putCustom(whiteListedClusterCustom, new RestoreInProgress.Builder().build()) + .putCustom(allowListedClusterCustom, new RestoreInProgress.Builder().build()) .putCustom("other", new RestoreInProgress.Builder().build()) .metadata( Metadata.builder() - .putCustom(whiteListedMetadataCustom, new RepositoriesMetadata(Collections.emptyList())) + .putCustom(allowListedMetadataCustom, new RepositoriesMetadata(Collections.emptyList())) .putCustom("other", new RepositoriesMetadata(Collections.emptyList())) .build() ) .build(); - assertNotNull(clusterState.custom(whiteListedClusterCustom)); + assertNotNull(clusterState.custom(allowListedClusterCustom)); assertNotNull(clusterState.custom("other")); - assertNotNull(clusterState.metadata().custom(whiteListedMetadataCustom)); + assertNotNull(clusterState.metadata().custom(allowListedMetadataCustom)); assertNotNull(clusterState.metadata().custom("other")); final ClusterState fixedClusterState = ClusterModule.filterCustomsForPre63Clients(clusterState); - assertNotNull(fixedClusterState.custom(whiteListedClusterCustom)); + assertNotNull(fixedClusterState.custom(allowListedClusterCustom)); assertNull(fixedClusterState.custom("other")); - assertNotNull(fixedClusterState.metadata().custom(whiteListedMetadataCustom)); + assertNotNull(fixedClusterState.metadata().custom(allowListedMetadataCustom)); assertNull(fixedClusterState.metadata().custom("other")); } diff --git a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java 
b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java index 03db6b22bc8bd..4cc3108d6bf85 100644 --- a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java @@ -221,9 +221,7 @@ public void testToXContent() throws IOException { + " }\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -424,9 +422,7 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti + "\"\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -627,11 +623,7 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" diff --git a/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java b/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java index d6812f7e53131..2278d09722fe2 100644 --- a/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java +++ b/server/src/test/java/org/opensearch/cluster/action/index/MappingUpdatedActionTests.java @@ -124,19 +124,19 @@ public void testMappingUpdatedActionBlocks() throws Exception { ) { @Override - protected void sendUpdateMapping(Index index, String type, Mapping mappingUpdate, ActionListener listener) { + protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListener listener) { inFlightListeners.add(listener); } }; PlainActionFuture fut1 = new PlainActionFuture<>(); - mua.updateMappingOnMaster(null, "test", null, fut1); + mua.updateMappingOnMaster(null, null, fut1); assertEquals(1, 
inFlightListeners.size()); assertEquals(0, mua.blockedThreads()); PlainActionFuture fut2 = new PlainActionFuture<>(); Thread thread = new Thread(() -> { - mua.updateMappingOnMaster(null, "test", null, fut2); // blocked + mua.updateMappingOnMaster(null, null, fut2); // blocked }); thread.start(); assertBusy(() -> assertEquals(1, mua.blockedThreads())); @@ -180,7 +180,7 @@ public void testSendUpdateMappingUsingPutMappingAction() { RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name").build(context); Mapping update = new Mapping(LegacyESVersion.V_7_8_0, rootObjectMapper, new MetadataFieldMapper[0], Map.of()); - mua.sendUpdateMapping(new Index("name", "uuid"), "type", update, ActionListener.wrap(() -> {})); + mua.sendUpdateMapping(new Index("name", "uuid"), update, ActionListener.wrap(() -> {})); verify(indicesAdminClient).putMapping(any(), any()); } @@ -210,7 +210,7 @@ public void testSendUpdateMappingUsingAutoPutMappingAction() { RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name").build(context); Mapping update = new Mapping(LegacyESVersion.V_7_9_0, rootObjectMapper, new MetadataFieldMapper[0], Map.of()); - mua.sendUpdateMapping(new Index("name", "uuid"), "type", update, ActionListener.wrap(() -> {})); + mua.sendUpdateMapping(new Index("name", "uuid"), update, ActionListener.wrap(() -> {})); verify(indicesAdminClient).execute(eq(AutoPutMappingAction.INSTANCE), any(), any()); } } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java index cde2a762786af..87860b8c536ef 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java @@ -32,7 +32,6 @@ package org.opensearch.cluster.metadata; -import org.opensearch.Version; import org.opensearch.action.admin.indices.rollover.MaxAgeCondition; import 
org.opensearch.action.admin.indices.rollover.MaxDocsCondition; import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; @@ -52,7 +51,6 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.IndicesModule; import org.opensearch.test.OpenSearchTestCase; @@ -363,32 +361,6 @@ public void testNumberOfRoutingShards() { assertEquals("the number of source shards [2] must be a factor of [3]", iae.getMessage()); } - public void testMappingOrDefault() throws IOException { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - IndexMetadata meta = IndexMetadata.builder("index").settings(settings).build(); - assertNull(meta.mappingOrDefault()); - - meta = IndexMetadata.builder("index").settings(settings).putMapping("type", "{}").build(); - assertNotNull(meta.mappingOrDefault()); - assertEquals("type", meta.mappingOrDefault().type()); - - meta = IndexMetadata.builder("index").settings(settings).putMapping(MapperService.DEFAULT_MAPPING, "{}").build(); - assertNotNull(meta.mappingOrDefault()); - assertEquals(MapperService.DEFAULT_MAPPING, meta.mappingOrDefault().type()); - - meta = IndexMetadata.builder("index") - .settings(settings) - .putMapping("type", "{}") - .putMapping(MapperService.DEFAULT_MAPPING, "{}") - .build(); - assertNotNull(meta.mappingOrDefault()); - assertEquals("type", meta.mappingOrDefault().type()); - } - public void testMissingNumberOfShards() { final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> IndexMetadata.builder("test").build()); assertThat(e.getMessage(), containsString("must specify number of shards for index 
[test]")); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/opensearch/cluster/metadata/IndexNameExpressionResolverTests.java index 2a18fed2d68e7..e736e27e5aa44 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1776,7 +1776,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { assertArrayEquals(new String[] { "test-alias" }, strings); DocWriteRequest request = randomFrom( new IndexRequest("test-alias"), - new UpdateRequest("test-alias", "_type", "_id"), + new UpdateRequest("test-alias", "_id"), new DeleteRequest("test-alias") ); IllegalArgumentException exception = expectThrows( @@ -1811,7 +1811,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { assertArrayEquals(new String[] { "test-alias" }, strings); DocWriteRequest request = randomFrom( new IndexRequest("test-alias"), - new UpdateRequest("test-alias", "_type", "_id"), + new UpdateRequest("test-alias", "_id"), new DeleteRequest("test-alias") ); IllegalArgumentException exception = expectThrows( diff --git a/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java index 203001b215cda..fb5537b5292ba 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java @@ -49,14 +49,12 @@ import java.util.Arrays; import java.util.Collections; -import static java.util.Collections.singletonMap; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.contains; public class IndexTemplateMetadataTests extends OpenSearchTestCase { public void 
testIndexTemplateMetadataXContentRoundTrip() throws Exception { - ToXContent.Params params = new ToXContent.MapParams(singletonMap("reduce_mappings", "true")); String template = "{\"index_patterns\" : [ \".test-*\" ],\"order\" : 1000," + "\"settings\" : {\"number_of_shards\" : 1,\"number_of_replicas\" : 0}," @@ -84,7 +82,7 @@ public void testIndexTemplateMetadataXContentRoundTrip() throws Exception { final BytesReference templateBytesRoundTrip; try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { builder.startObject(); - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); + IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, ToXContent.EMPTY_PARAMS); builder.endObject(); templateBytesRoundTrip = BytesReference.bytes(builder); } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index a60946f8befaa..ddaea6edbfd90 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -240,7 +240,7 @@ private static MetadataCreateIndexService getMetadataCreateIndexService() throws .put(request.settings()) .build() ) - .putMapping("_doc", generateMapping("@timestamp")) + .putMapping(generateMapping("@timestamp")) .numberOfShards(1) .numberOfReplicas(1) .build(), diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 45fc92ab66062..19f6a516ca83a 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ 
b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -109,7 +109,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -711,20 +710,18 @@ public void testParseMappingsAppliesDataFromTemplateAndRequest() throws Exceptio templateBuilder.putAlias(AliasMetadata.builder("alias1")); templateBuilder.putMapping("type", createMapping("mapping_from_template", "text")); }); - request.mappings(singletonMap("type", createMapping("mapping_from_request", "text").string())); + request.mappings(createMapping("mapping_from_request", "text").string()); - Map> parsedMappings = MetadataCreateIndexService.parseV1Mappings( + Map parsedMappings = MetadataCreateIndexService.parseV1Mappings( request.mappings(), - Collections.singletonList(convertMappings(templateMetadata.getMappings())), + Collections.singletonList(templateMetadata.getMappings()), NamedXContentRegistry.EMPTY ); - assertThat(parsedMappings, hasKey("type")); - Map mappingType = parsedMappings.get("type"); - assertThat(mappingType, hasKey("type")); - Map type = (Map) mappingType.get("type"); - assertThat(type, hasKey("properties")); - Map mappingsProperties = (Map) type.get("properties"); + assertThat(parsedMappings, hasKey(MapperService.SINGLE_MAPPING_NAME)); + Map doc = (Map) parsedMappings.get(MapperService.SINGLE_MAPPING_NAME); + assertThat(doc, hasKey("properties")); + Map mappingsProperties = (Map) doc.get("properties"); assertThat(mappingsProperties, hasKey("mapping_from_request")); assertThat(mappingsProperties, hasKey("mapping_from_template")); } @@ -781,17 +778,17 @@ public void testRequestDataHavePriorityOverTemplateData() throws Exception { IndexTemplateMetadata 
templateMetadata = addMatchingTemplate( builder -> builder.putAlias(AliasMetadata.builder("alias").searchRouting("fromTemplate").build()) - .putMapping("type", templateMapping) + .putMapping("_doc", templateMapping) .settings(Settings.builder().put("key1", "templateValue")) ); - request.mappings(singletonMap("type", reqMapping.string())); - request.aliases(singleton(new Alias("alias").searchRouting("fromRequest"))); + request.mappings(reqMapping.string()); + request.aliases(Collections.singleton(new Alias("alias").searchRouting("fromRequest"))); request.settings(Settings.builder().put("key1", "requestValue").build()); - Map> parsedMappings = MetadataCreateIndexService.parseV1Mappings( + Map parsedMappings = MetadataCreateIndexService.parseV1Mappings( request.mappings(), - Collections.singletonList(convertMappings(templateMetadata.mappings())), + Collections.singletonList(templateMetadata.mappings()), xContentRegistry() ); List resolvedAliases = resolveAndValidateAliases( @@ -816,12 +813,10 @@ public void testRequestDataHavePriorityOverTemplateData() throws Exception { assertThat(resolvedAliases.get(0).getSearchRouting(), equalTo("fromRequest")); assertThat(aggregatedIndexSettings.get("key1"), equalTo("requestValue")); - assertThat(parsedMappings, hasKey("type")); - Map mappingType = parsedMappings.get("type"); - assertThat(mappingType, hasKey("type")); - Map type = (Map) mappingType.get("type"); - assertThat(type, hasKey("properties")); - Map mappingsProperties = (Map) type.get("properties"); + assertThat(parsedMappings, hasKey("_doc")); + Map doc = (Map) parsedMappings.get("_doc"); + assertThat(doc, hasKey("properties")); + Map mappingsProperties = (Map) doc.get("properties"); assertThat(mappingsProperties, hasKey("test")); assertThat((Map) mappingsProperties.get("test"), hasValue("keyword")); } @@ -1046,9 +1041,9 @@ public void testParseMappingsWithTypedTemplateAndTypelessIndexMapping() throws E } }); - Map> mappings = parseV1Mappings( - 
singletonMap(MapperService.SINGLE_MAPPING_NAME, "{\"_doc\":{}}"), - Collections.singletonList(convertMappings(templateMetadata.mappings())), + Map mappings = parseV1Mappings( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}", + Collections.singletonList(templateMetadata.mappings()), xContentRegistry() ); assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); @@ -1062,12 +1057,8 @@ public void testParseMappingsWithTypedTemplate() throws Exception { ExceptionsHelper.reThrowIfNotNull(e); } }); - Map> mappings = parseV1Mappings( - emptyMap(), - Collections.singletonList(convertMappings(templateMetadata.mappings())), - xContentRegistry() - ); - assertThat(mappings, Matchers.hasKey("type")); + Map mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry()); + assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); } public void testParseMappingsWithTypelessTemplate() throws Exception { @@ -1078,11 +1069,7 @@ public void testParseMappingsWithTypelessTemplate() throws Exception { ExceptionsHelper.reThrowIfNotNull(e); } }); - Map> mappings = parseV1Mappings( - emptyMap(), - Collections.singletonList(convertMappings(templateMetadata.mappings())), - xContentRegistry() - ); + Map mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry()); assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); } @@ -1100,16 +1087,7 @@ public void testBuildIndexMetadata() { .put(SETTING_NUMBER_OF_SHARDS, 1) .build(); List aliases = singletonList(AliasMetadata.builder("alias1").build()); - IndexMetadata indexMetadata = buildIndexMetadata( - "test", - aliases, - () -> null, - () -> null, - indexSettings, - 4, - sourceIndexMetadata, - false - ); + IndexMetadata indexMetadata = buildIndexMetadata("test", aliases, () -> null, indexSettings, 4, sourceIndexMetadata, false); assertThat(indexMetadata.getAliases().size(), is(1)); 
assertThat(indexMetadata.getAliases().keys().iterator().next().value, is("alias1")); @@ -1262,7 +1240,7 @@ private CompressedXContent createMapping(String fieldName, String fieldType) { final String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(fieldName) .field("type", fieldType) diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 685a2288a9128..19e73422c5362 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -970,7 +970,6 @@ public void testFindV2InvalidGlobalTemplate() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/57393") public void testResolveConflictingMappings() throws Exception { final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); ClusterState state = ClusterState.EMPTY_STATE; @@ -2066,6 +2065,27 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception { service.addIndexTemplateV2(stateWithDSAndTemplate, false, "logs", nonDSTemplate); } + public void testLegacyNoopUpdate() { + ClusterState state = ClusterState.EMPTY_STATE; + PutRequest pr = new PutRequest("api", "id"); + pr.patterns(Arrays.asList("foo", "bar")); + if (randomBoolean()) { + pr.settings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).build()); + } + if (randomBoolean()) { + pr.mappings(Collections.emptyMap()); + } + if (randomBoolean()) { + pr.aliases(Collections.singleton(new Alias("alias"))); + } + pr.order(randomIntBetween(0, 10)); + state = MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id")); + + 
assertNotNull(state.metadata().templates().get("id")); + + assertThat(MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id")), equalTo(state)); + } + private static List putTemplate(NamedXContentRegistry xContentRegistry, PutRequest request) { MetadataCreateIndexService createIndexService = new MetadataCreateIndexService( Settings.EMPTY, diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index 73b10789ac4f5..94bf162303127 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -32,15 +32,11 @@ package org.opensearch.cluster.metadata; -import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.opensearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateTaskExecutor; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.mapper.MapperService; @@ -51,7 +47,6 @@ import java.util.Collection; import java.util.Collections; -import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -63,15 +58,19 @@ protected Collection> getPlugins() { } public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception { - final IndexService indexService = createIndex("test", 
client().admin().indices().prepareCreate("test").addMapping("type")); - final CompressedXContent currentMapping = indexService.mapperService().documentMapper("type").mappingSource(); + final IndexService indexService = createIndex( + "test", + client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME) + ); + final CompressedXContent currentMapping = indexService.mapperService().documentMapper().mappingSource(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); // TODO - it will be nice to get a random mapping generator - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\": { \"field\": { \"type\": \"text\" }}}" + ); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); // the task completed successfully @@ -79,11 +78,11 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex assertTrue(result.executionResults.values().iterator().next().isSuccess()); // the task really was a mapping update assertThat( - indexService.mapperService().documentMapper("type").mappingSource(), - not(equalTo(result.resultingState.metadata().index("test").getMappings().get("type").source())) + indexService.mapperService().documentMapper().mappingSource(), + not(equalTo(result.resultingState.metadata().index("test").mapping().source())) ); // since we never committed the cluster state update, the in-memory state is unchanged - 
assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping)); + assertThat(indexService.mapperService().documentMapper().mappingSource(), equalTo(currentMapping)); } public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception { @@ -91,8 +90,9 @@ public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); - request.source("{ \"properties\" { \"field\": { \"type\": \"text\" }}}"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\" { \"field\": { \"type\": \"text\" }}}" + ); ClusterState result = mappingService.putMappingExecutor.execute( clusterService.state(), Collections.singletonList(request) @@ -110,9 +110,10 @@ public void testMappingVersion() throws Exception { final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest( + "{ \"properties\": { \"field\": { \"type\": \"text\" }}}" + ); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); assertThat(result.executionResults.size(), equalTo(1)); @@ -125,116 
+126,12 @@ public void testMappingVersionUnchanged() throws Exception { final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type"); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest("{ \"properties\": {}}"); request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": {}}"); final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor .execute(clusterService.state(), Collections.singletonList(request)); assertThat(result.executionResults.size(), equalTo(1)); assertTrue(result.executionResults.values().iterator().next().isSuccess()); assertThat(result.resultingState.metadata().index("test").getMappingVersion(), equalTo(previousVersion)); } - - public void testMappingUpdateAccepts_docAsType() throws Exception { - final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("my_type")); - final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); - final ClusterService clusterService = getInstanceFromNode(ClusterService.class); - final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type( - MapperService.SINGLE_MAPPING_NAME - ); - request.indices(new Index[] { indexService.index() }); - request.source("{ \"properties\": { \"foo\": { \"type\": \"keyword\" } }}"); - final ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor - .execute(clusterService.state(), Collections.singletonList(request)); - assertThat(result.executionResults.size(), equalTo(1)); - 
assertTrue(result.executionResults.values().iterator().next().isSuccess()); - MappingMetadata mappingMetadata = result.resultingState.metadata().index("test").mapping(); - assertEquals("my_type", mappingMetadata.type()); - assertEquals( - Collections.singletonMap("properties", Collections.singletonMap("foo", Collections.singletonMap("type", "keyword"))), - mappingMetadata.sourceAsMap() - ); - } - - public void testForbidMultipleTypes() throws Exception { - CreateIndexRequestBuilder createIndexRequest = client().admin() - .indices() - .prepareCreate("test") - .addMapping(MapperService.SINGLE_MAPPING_NAME); - IndexService indexService = createIndex("test", createIndexRequest); - - MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); - ClusterService clusterService = getInstanceFromNode(ClusterService.class); - - PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("other_type") - .indices(new Index[] { indexService.index() }) - .source(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("other_type").endObject().endObject())); - ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor.execute( - clusterService.state(), - Collections.singletonList(request) - ); - assertThat(result.executionResults.size(), equalTo(1)); - - ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next(); - assertFalse(taskResult.isSuccess()); - assertThat( - taskResult.getFailure().getMessage(), - containsString("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ") - ); - } - - /** - * This test checks that the multi-type validation is done before we do any other kind of validation - * on the mapping that's added, see https://github.com/elastic/elasticsearch/issues/29313 - */ - public void testForbidMultipleTypesWithConflictingMappings() throws Exception { - XContentBuilder mapping = 
XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("properties") - .startObject("field1") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject(); - - CreateIndexRequestBuilder createIndexRequest = client().admin() - .indices() - .prepareCreate("test") - .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping); - IndexService indexService = createIndex("test", createIndexRequest); - - MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); - ClusterService clusterService = getInstanceFromNode(ClusterService.class); - - String conflictingMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("other_type") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); - - PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("other_type") - .indices(new Index[] { indexService.index() }) - .source(conflictingMapping); - ClusterStateTaskExecutor.ClusterTasksResult result = mappingService.putMappingExecutor.execute( - clusterService.state(), - Collections.singletonList(request) - ); - assertThat(result.executionResults.size(), equalTo(1)); - - ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next(); - assertFalse(taskResult.isSuccess()); - assertThat( - taskResult.getFailure().getMessage(), - containsString("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ") - ); - } } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java index 2ef9cf3bc9b1f..4e7502ada661f 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java +++ 
b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java @@ -644,7 +644,7 @@ public void testFindMappings() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM) + .putMapping(FIND_MAPPINGS_TEST_ITEM) ) .put( IndexMetadata.builder("index2") @@ -654,39 +654,25 @@ public void testFindMappings() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM) + .putMapping(FIND_MAPPINGS_TEST_ITEM) ) .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - MapperPlugin.NOOP_FIELD_FILTER - ); - assertEquals(0, mappings.size()); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - new String[] { "notfound" }, - MapperPlugin.NOOP_FIELD_FILTER - ); + ImmutableOpenMap mappings = metadata.findMappings(Strings.EMPTY_ARRAY, MapperPlugin.NOOP_FIELD_FILTER); assertEquals(0, mappings.size()); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - Strings.EMPTY_ARRAY, MapperPlugin.NOOP_FIELD_FILTER ); assertEquals(1, mappings.size()); assertIndexMappingsNotFiltered(mappings, "index1"); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2" }, - new String[] { randomBoolean() ? "_doc" : "_all" }, MapperPlugin.NOOP_FIELD_FILTER ); assertEquals(2, mappings.size()); @@ -715,43 +701,19 @@ public void testFindMappingsNoOpFilters() throws IOException { .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - randomBoolean() ? 
Strings.EMPTY_ARRAY : new String[] { "_all" }, - MapperPlugin.NOOP_FIELD_FILTER - ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); - assertSame(originalMappingMetadata, mappingMetadata); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( - new String[] { "index1" }, - randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "_all" }, - index -> field -> randomBoolean() - ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); - assertNotSame(originalMappingMetadata, mappingMetadata); - } - { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - new String[] { "_doc" }, MapperPlugin.NOOP_FIELD_FILTER ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index1"); assertSame(originalMappingMetadata, mappingMetadata); } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1" }, - new String[] { "_doc" }, index -> field -> randomBoolean() ); - ImmutableOpenMap index1 = mappings.get("index1"); - MappingMetadata mappingMetadata = index1.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index1"); assertNotSame(originalMappingMetadata, mappingMetadata); } } @@ -777,7 +739,7 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .put( IndexMetadata.builder("index2") @@ -787,7 +749,7 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .put( 
IndexMetadata.builder("index3") @@ -797,14 +759,13 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .build(); { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> { if (index.equals("index1")) { return field -> field.startsWith("name.") == false @@ -822,11 +783,7 @@ public void testFindMappingsWithFilters() throws IOException { assertIndexMappingsNoFields(mappings, "index2"); assertIndexMappingsNotFiltered(mappings, "index3"); - ImmutableOpenMap index1Mappings = mappings.get("index1"); - assertNotNull(index1Mappings); - - assertEquals(1, index1Mappings.size()); - MappingMetadata docMapping = index1Mappings.get("_doc"); + MappingMetadata docMapping = mappings.get("index1"); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); @@ -868,17 +825,14 @@ public void testFindMappingsWithFilters() throws IOException { } { - ImmutableOpenMap> mappings = metadata.findMappings( + ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> field -> (index.equals("index3") && field.endsWith("keyword")) ); assertIndexMappingsNoFields(mappings, "index1"); assertIndexMappingsNoFields(mappings, "index2"); - ImmutableOpenMap index3 = mappings.get("index3"); - assertEquals(1, index3.size()); - MappingMetadata mappingMetadata = index3.get("_doc"); + MappingMetadata mappingMetadata = mappings.get("index3"); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(3, sourceAsMap.size()); assertTrue(sourceAsMap.containsKey("_routing")); @@ -906,9 +860,8 @@ public void testFindMappingsWithFilters() throws IOException { } { - ImmutableOpenMap> mappings = metadata.findMappings( + 
ImmutableOpenMap mappings = metadata.findMappings( new String[] { "index1", "index2", "index3" }, - new String[] { "_doc" }, index -> field -> (index.equals("index2")) ); @@ -928,14 +881,8 @@ private static IndexMetadata.Builder buildIndexMetadata(String name, String alia } @SuppressWarnings("unchecked") - private static void assertIndexMappingsNoFields( - ImmutableOpenMap> mappings, - String index - ) { - ImmutableOpenMap indexMappings = mappings.get(index); - assertNotNull(indexMappings); - assertEquals(1, indexMappings.size()); - MappingMetadata docMapping = indexMappings.get("_doc"); + private static void assertIndexMappingsNoFields(ImmutableOpenMap mappings, String index) { + MappingMetadata docMapping = mappings.get(index); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); assertEquals(3, sourceAsMap.size()); @@ -946,15 +893,8 @@ private static void assertIndexMappingsNoFields( } @SuppressWarnings("unchecked") - private static void assertIndexMappingsNotFiltered( - ImmutableOpenMap> mappings, - String index - ) { - ImmutableOpenMap indexMappings = mappings.get(index); - assertNotNull(indexMappings); - - assertEquals(1, indexMappings.size()); - MappingMetadata docMapping = indexMappings.get("_doc"); + private static void assertIndexMappingsNotFiltered(ImmutableOpenMap mappings, String index) { + MappingMetadata docMapping = mappings.get(index); assertNotNull(docMapping); Map sourceAsMap = docMapping.getSourceAsMap(); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java index 14996b5ebf453..253018d7f569f 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java @@ -112,8 +112,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .creationDate(2L) .numberOfShards(1) 
.numberOfReplicas(2) - .putMapping("mapping1", MAPPING_SOURCE1) - .putMapping("mapping2", MAPPING_SOURCE2) + .putMapping(MAPPING_SOURCE1) .putAlias(newAliasMetadataBuilder("alias1").filter(ALIAS_FILTER1)) .putAlias(newAliasMetadataBuilder("alias3").writeIndex(randomBoolean() ? null : randomBoolean())) .putAlias(newAliasMetadataBuilder("alias4").filter(ALIAS_FILTER2)) @@ -264,11 +263,9 @@ public void testToXContentGateway_FlatSettingTrue_ReduceMappingFalse() throws IO + Version.CURRENT.id + "\"\n" + " },\n" - + " \"mappings\" : [\n" - + " {\n" - + " \"key1\" : { }\n" - + " }\n" - + " ],\n" + + " \"mappings\" : {\n" + + " \"key1\" : { }\n" + + " },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" @@ -434,11 +431,7 @@ public void testToXContentGateway_FlatSettingFalse_ReduceMappingTrue() throws IO + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" @@ -500,9 +493,7 @@ public void testToXContentAPI_FlatSettingTrue_ReduceMappingFalse() throws IOExce + "\"\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -610,11 +601,7 @@ public void testToXContentAPI_FlatSettingFalse_ReduceMappingTrue() throws IOExce + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" diff --git a/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java b/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java index fba6f1d48930b..aa2be1fb652cd 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java @@ -12,15 +12,11 @@ import 
org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.cluster.ClusterStateListener; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; import java.util.concurrent.CountDownLatch; -import java.util.stream.Stream; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @@ -53,14 +49,17 @@ protected void createAndIndex(String index, int replicaCount, int shardCount) { } /** - * Creates two nodes each in two zones and shuts down nodes in one zone - * after relocating half the number of shards. Since, primaries are relocated - * first, cluster should stay green as primary should have relocated + * Creates two nodes each in two zones and shuts down nodes in zone1 after + * relocating half the number of shards. Shards per node constraint ensures + * that exactly 50% of shards relocate to nodes in zone2 giving time to shut down + * nodes in zone1. 
Since primaries are relocated first as movePrimaryFirst is + * enabled, cluster should not become red and zone2 nodes have all the primaries */ public void testClusterGreenAfterPartialRelocation() throws InterruptedException { internalCluster().startMasterOnlyNodes(1); final String z1 = "zone-1", z2 = "zone-2"; - final int primaryShardCount = 100; + final int primaryShardCount = 6; + assertTrue("Primary shard count must be even for equal distribution across two nodes", primaryShardCount % 2 == 0); final String z1n1 = startDataOnlyNode(z1); ensureGreen(); createAndIndex("foo", 1, primaryShardCount); @@ -88,24 +87,17 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException if (event.routingTableChanged()) { final RoutingNodes routingNodes = event.state().getRoutingNodes(); int startedCount = 0; - List initz2n1 = new ArrayList<>(), initz2n2 = new ArrayList<>(); for (Iterator it = routingNodes.iterator(); it.hasNext();) { RoutingNode routingNode = it.next(); final String nodeName = routingNode.node().getName(); - if (nodeName.equals(z2n1)) { + if (nodeName.equals(z2n1) || nodeName.equals(z2n2)) { startedCount += routingNode.numberOfShardsWithState(ShardRoutingState.STARTED); - initz2n1 = routingNode.shardsWithState(ShardRoutingState.INITIALIZING); - } else if (nodeName.equals(z2n2)) { - startedCount += routingNode.numberOfShardsWithState(ShardRoutingState.STARTED); - initz2n2 = routingNode.shardsWithState(ShardRoutingState.INITIALIZING); } } - if (!Stream.concat(initz2n1.stream(), initz2n2.stream()).anyMatch(s -> s.primary())) { - // All primaries are relocated before 60% of total shards are started on new nodes - final int totalShardCount = primaryShardCount * 2; - if (primaryShardCount <= startedCount && startedCount <= 3 * totalShardCount / 5) { - primaryMoveLatch.countDown(); - } + + // Count down the latch once all the primary shards have initialized on nodes in zone-2 + if (startedCount == primaryShardCount) { + 
primaryMoveLatch.countDown(); } } }; @@ -113,15 +105,23 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException // Exclude zone1 nodes for allocation and await latch count down settingsRequest = new ClusterUpdateSettingsRequest(); - settingsRequest.persistentSettings(Settings.builder().put("cluster.routing.allocation.exclude.zone", z1)); + settingsRequest.persistentSettings( + Settings.builder() + .put("cluster.routing.allocation.exclude.zone", z1) + // Total shards per node constraint is added to pause the relocation after primary shards + // have relocated to allow time for node shutdown and validate yellow cluster + .put("cluster.routing.allocation.total_shards_per_node", primaryShardCount / 2) + ); client().admin().cluster().updateSettings(settingsRequest); primaryMoveLatch.await(); - // Shutdown both nodes in zone and ensure cluster stays green + // Shutdown both nodes in zone 1 and ensure cluster does not become red try { internalCluster().stopRandomNode(InternalTestCluster.nameFilter(z1n1)); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(z1n2)); } catch (Exception e) {} - ensureGreen(TimeValue.timeValueSeconds(60)); + // Due to shards per node constraint cluster cannot be green + // Since yellow suffices for this test, not removing shards constraint + ensureYellow(); } } diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index 2b408097757a5..0b00d26182346 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -95,7 +95,7 @@ public void testFilterInitialRecovery() { assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); 
assertNull(routingTable.index("idx").shard(0).shards().get(0).currentNodeId()); - // after failing the shard we are unassigned since the node is blacklisted and we can't initialize on the other node + // after failing the shard we are unassigned since the node is denylisted and we can't initialize on the other node RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, null, 0); allocation.debugDecision(true); Decision.Single decision = (Decision.Single) filterAllocationDecider.canAllocate( diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java new file mode 100644 index 0000000000000..63058da8f163a --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class GatedAutoCloseableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private GatedAutoCloseable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new GatedAutoCloseable<>(testRef, testRef::incrementAndGet); + } + + public void testGet() { + assertEquals(0, testObject.get().get()); + } + + public void testClose() { + testObject.close(); + assertEquals(1, testObject.get().get()); + } + + public void testIdempotent() { + testObject.close(); + testObject.close(); + assertEquals(1, testObject.get().get()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java new file mode 100644 index 0000000000000..0645f971b8d63 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java @@ -0,0 +1,60 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.nio.file.FileSystem; + +import static org.mockito.Mockito.atMostOnce; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +public class GatedCloseableTests extends OpenSearchTestCase { + + private FileSystem testRef; + GatedCloseable testObject; + + @Before + public void setup() { + testRef = mock(FileSystem.class); + testObject = new GatedCloseable<>(testRef, testRef::close); + } + + public void testGet() throws Exception { + assertNotNull(testObject.get()); + assertEquals(testRef, testObject.get()); + verify(testRef, never()).close(); + } + + public void testClose() throws IOException { + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testIdempotent() throws IOException { + testObject.close(); + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testException() throws IOException { + doThrow(new IOException()).when(testRef).close(); + assertThrows(IOException.class, () -> testObject.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java new file mode 100644 index 0000000000000..357bf3ae321f8 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java @@ -0,0 +1,41 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +public class OneWayGateTests extends OpenSearchTestCase { + + private OneWayGate testGate; + + @Before + public void setup() { + testGate = new OneWayGate(); + } + + public void testGateOpen() { + assertFalse(testGate.isClosed()); + } + + public void testGateClosed() { + testGate.close(); + assertTrue(testGate.isClosed()); + } + + public void testGateIdempotent() { + assertTrue(testGate.close()); + assertFalse(testGate.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java new file mode 100644 index 0000000000000..63c0873f1593d --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class RefCountedReleasableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private RefCountedReleasable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new RefCountedReleasable<>("test", testRef, testRef::incrementAndGet); + } + + public void testInitialState() { + assertEquals("test", testObject.getName()); + assertEquals(testRef, testObject.get()); + assertEquals(testRef, testObject.get()); + assertEquals(0, testObject.get().get()); + assertEquals(1, testObject.refCount()); + } + + public void testIncRef() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithoutInternal() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + testObject.close(); + assertEquals(1, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithInternal() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + } + + public void testIncRefAfterClose() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + assertThrows(IllegalStateException.class, () -> testObject.incRef()); + } +} diff --git a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java index 55e3e3ac6c1ef..30fcf4bb32989 100644 --- a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java @@ -66,8 +66,8 @@ protected boolean 
enableWarningsCheck() { public static void checkJvmProperties() { boolean runtimeJdk8 = JavaVersion.current().getVersion().get(0) == 8; assert (runtimeJdk8 && ("SPI,JRE".equals(System.getProperty("java.locale.providers")))) - || (false == runtimeJdk8 - && ("SPI,COMPAT".equals(System.getProperty("java.locale.providers")))) : "`-Djava.locale.providers` needs to be set"; + || (false == runtimeJdk8 && ("SPI,COMPAT".equals(System.getProperty("java.locale.providers")))) + : "`-Djava.locale.providers` needs to be set"; assumeFalse( "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", runtimeJdk8 diff --git a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java index b67372ea9e838..1e57f9fe88d9c 100644 --- a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java @@ -95,16 +95,127 @@ public void testEpochMillisParser() { Instant instant = Instant.from(formatter.parse("12345")); assertThat(instant.getEpochSecond(), is(12L)); assertThat(instant.getNano(), is(345_000_000)); + assertThat(formatter.format(instant), is("12345")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } { Instant instant = Instant.from(formatter.parse("0")); assertThat(instant.getEpochSecond(), is(0L)); assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("0")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-123000.123456")); + assertThat(instant.getEpochSecond(), is(-124L)); + assertThat(instant.getNano(), is(999876544)); + assertThat(formatter.format(instant), is("-123000.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } { Instant 
instant = Instant.from(formatter.parse("123.123456")); assertThat(instant.getEpochSecond(), is(0L)); assertThat(instant.getNano(), is(123123456)); + assertThat(formatter.format(instant), is("123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-123.123456")); + assertThat(instant.getEpochSecond(), is(-1L)); + assertThat(instant.getNano(), is(876876544)); + assertThat(formatter.format(instant), is("-123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6789123.123456")); + assertThat(instant.getEpochSecond(), is(-6790L)); + assertThat(instant.getNano(), is(876876544)); + assertThat(formatter.format(instant), is("-6789123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("6789123.123456")); + assertThat(instant.getEpochSecond(), is(6789L)); + assertThat(instant.getNano(), is(123123456)); + assertThat(formatter.format(instant), is("6789123.123456")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768.25")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(231750000)); + assertThat(formatter.format(instant), is("-6250000430768.25")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768.75")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(231250000)); + assertThat(formatter.format(instant), is("-6250000430768.75")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = 
Instant.from(formatter.parse("-6250000430768.00")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(232000000)); + assertThat(formatter.format(instant), is("-6250000430768")); // remove .00 precision + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.250000")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999750000)); + assertThat(formatter.format(instant), is("-6250000431000.25")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.000001")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999999999)); + assertThat(formatter.format(instant), is("-6250000431000.000001")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.75")); + assertThat(instant.getEpochSecond(), is(-6250000432L)); + assertThat(instant.getNano(), is(999250000)); + assertThat(formatter.format(instant), is("-6250000431000.75")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000.00")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("-6250000431000")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000431000")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(0)); + assertThat(formatter.format(instant), is("-6250000431000")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), 
is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-6250000430768")); + assertThat(instant.getEpochSecond(), is(-6250000431L)); + assertThat(instant.getNano(), is(232000000)); + assertThat(formatter.format(instant), is("-6250000430768")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("1680000430768")); + assertThat(instant.getEpochSecond(), is(1680000430L)); + assertThat(instant.getNano(), is(768000000)); + assertThat(formatter.format(instant), is("1680000430768")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); + } + { + Instant instant = Instant.from(formatter.parse("-0.12345")); + assertThat(instant.getEpochSecond(), is(-1L)); + assertThat(instant.getNano(), is(999876550)); + assertThat(formatter.format(instant), is("-0.12345")); + assertThat(Instant.from(formatter.parse(formatter.format(instant))), is(instant)); } } @@ -227,20 +338,69 @@ public void testEpochFormatting() { long seconds = randomLongBetween(0, 130L * 365 * 86400); // from 1970 epoch till around 2100 long nanos = randomLongBetween(0, 999_999_999L); Instant instant = Instant.ofEpochSecond(seconds, nanos); + { + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); + assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 0)), is("42000")); + assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 123456789L)), is("42123.456789")); + + DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); + String formattedSeconds = secondsFormatter.format(instant); + Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); + assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + 
+ assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + } + { + DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos"); + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + String iso8601 = isoFormatter.format(instant); - DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); - String millis = millisFormatter.format(instant); - Instant millisInstant = Instant.from(millisFormatter.parse(millis)); - assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); - assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 0)), is("42000")); - assertThat(millisFormatter.format(Instant.ofEpochSecond(42, 123456789L)), is("42123.456789")); + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + Instant isoInstant = Instant.from(isoFormatter.parse(iso8601)); - DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); - String formattedSeconds = secondsFormatter.format(instant); - Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); - assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli())); + assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond())); + assertThat(millisInstant.getNano(), is(isoInstant.getNano())); + } + } + + public void testEpochFormattingNegativeEpoch() { + long seconds = randomLongBetween(-130L * 365 * 86400, 0); // around 1840 till 1970 epoch + long nanos = randomLongBetween(0, 999_999_999L); + Instant instant = Instant.ofEpochSecond(seconds, nanos); - assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + { + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + Instant millisInstant = 
Instant.from(millisFormatter.parse(millis)); + assertThat(millisInstant.toEpochMilli(), is(instant.toEpochMilli())); + assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 0)), is("-42000")); + assertThat(millisFormatter.format(Instant.ofEpochSecond(-42, 123456789L)), is("-41876.543211")); + + DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); + String formattedSeconds = secondsFormatter.format(instant); + Instant secondsInstant = Instant.from(secondsFormatter.parse(formattedSeconds)); + assertThat(secondsInstant.getEpochSecond(), is(instant.getEpochSecond())); + + assertThat(secondsFormatter.format(Instant.ofEpochSecond(42, 0)), is("42")); + } + { + DateFormatter isoFormatter = DateFormatters.forPattern("strict_date_optional_time_nanos"); + DateFormatter millisFormatter = DateFormatter.forPattern("epoch_millis"); + String millis = millisFormatter.format(instant); + String iso8601 = isoFormatter.format(instant); + + Instant millisInstant = Instant.from(millisFormatter.parse(millis)); + Instant isoInstant = Instant.from(isoFormatter.parse(iso8601)); + + assertThat(millisInstant.toEpochMilli(), is(isoInstant.toEpochMilli())); + assertThat(millisInstant.getEpochSecond(), is(isoInstant.getEpochSecond())); + assertThat(millisInstant.getNano(), is(isoInstant.getNano())); + } } public void testParsingStrictNanoDates() { diff --git a/server/src/test/java/org/opensearch/index/IndexServiceTests.java b/server/src/test/java/org/opensearch/index/IndexServiceTests.java index 3592298c34995..be38b707b77b4 100644 --- a/server/src/test/java/org/opensearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/opensearch/index/IndexServiceTests.java @@ -300,7 +300,7 @@ public void testRefreshActuallyWorks() throws Exception { assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); IndexShard shard = indexService.getShard(0); - client().prepareIndex("test", "test", 
"0").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("0").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); // now disable the refresh client().admin() .indices() @@ -311,44 +311,38 @@ public void testRefreshActuallyWorks() throws Exception { // before that this is why we need to wait for the refresh task to be unscheduled and the first doc to be visible assertTrue(refreshTask.isClosed()); refreshTask = indexService.getRefreshTask(); - assertBusy( - () -> { - // this one either becomes visible due to a concurrently running scheduled refresh OR due to the force refresh - // we are running on updateMetadata if the interval changes - try (Engine.Searcher searcher = shard.acquireSearcher("test")) { - TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); - } + assertBusy(() -> { + // this one either becomes visible due to a concurrently running scheduled refresh OR due to the force refresh + // we are running on updateMetadata if the interval changes + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); + assertEquals(1, search.totalHits.value); } - ); + }); assertFalse(refreshTask.isClosed()); // refresh every millisecond - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1ms")) .get(); assertTrue(refreshTask.isClosed()); - assertBusy( - () -> { - // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes - try (Engine.Searcher searcher = shard.acquireSearcher("test")) { - TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - 
assertEquals(2, search.totalHits.value); - } + assertBusy(() -> { + // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); + assertEquals(2, search.totalHits.value); } - ); - client().prepareIndex("test", "test", "2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); - assertBusy( - () -> { - // this one becomes visible due to the scheduled refresh - try (Engine.Searcher searcher = shard.acquireSearcher("test")) { - TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); - } + }); + client().prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + assertBusy(() -> { + // this one becomes visible due to the scheduled refresh + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); + assertEquals(3, search.totalHits.value); } - ); + }); } public void testAsyncFsyncActuallyWorks() throws Exception { @@ -359,7 +353,7 @@ public void testAsyncFsyncActuallyWorks() throws Exception { IndexService indexService = createIndex("test", settings); ensureGreen("test"); assertTrue(indexService.getRefreshTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); IndexShard shard = indexService.getShard(0); assertBusy(() -> assertFalse(shard.isSyncNeeded())); } @@ -381,7 +375,7 @@ public void testRescheduleAsyncFsync() throws Exception { assertNotNull(indexService.getFsyncTask()); assertTrue(indexService.getFsyncTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + 
client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); assertNotNull(indexService.getFsyncTask()); final IndexShard shard = indexService.getShard(0); assertBusy(() -> assertFalse(shard.isSyncNeeded())); @@ -408,7 +402,7 @@ public void testAsyncTranslogTrimActuallyWorks() throws Exception { IndexService indexService = createIndex("test", settings); ensureGreen("test"); assertTrue(indexService.getTrimTranslogTask().mustReschedule()); - client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); + client().prepareIndex("test").setId("1").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); client().admin().indices().prepareFlush("test").get(); client().admin() .indices() diff --git a/server/src/test/java/org/opensearch/index/IndexingPressureServiceTests.java b/server/src/test/java/org/opensearch/index/IndexingPressureServiceTests.java index 531866dd82ee9..22d185643018a 100644 --- a/server/src/test/java/org/opensearch/index/IndexingPressureServiceTests.java +++ b/server/src/test/java/org/opensearch/index/IndexingPressureServiceTests.java @@ -51,11 +51,7 @@ public void testCoordinatingOperationForShardIndexingPressure() { Index index = new Index("IndexName", "UUID"); ShardId shardId = new ShardId(index, 0); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source( - Requests.INDEX_CONTENT_TYPE, - "foo", - "bar" - ); + DocWriteRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, WriteRequest.RefreshPolicy.NONE, items); Releasable releasable = service.markCoordinatingOperationStarted(shardId, bulkShardRequest::ramBytesUsed, false); diff --git a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java 
b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java index facb443422b31..38c8491d79150 100644 --- a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java @@ -223,7 +223,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", "routingValue", null, source, @@ -237,7 +236,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException { assertThat(p.getValueFor("message"), equalTo("[foo/123]")); assertThat(p.getValueFor("took"), equalTo("10nanos")); assertThat(p.getValueFor("took_millis"), equalTo("0")); - assertThat(p.getValueFor("doc_type"), equalTo("test")); assertThat(p.getValueFor("id"), equalTo("id")); assertThat(p.getValueFor("routing"), equalTo("routingValue")); assertThat(p.getValueFor("source"), is(emptyOrNullString())); @@ -253,7 +251,6 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", null, null, source, @@ -284,7 +281,6 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", null, null, source, diff --git a/server/src/test/java/org/opensearch/index/SearchSlowLogTests.java b/server/src/test/java/org/opensearch/index/SearchSlowLogTests.java index b81e194ca4282..ae159092a4833 100644 --- a/server/src/test/java/org/opensearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/opensearch/index/SearchSlowLogTests.java @@ -96,7 +96,7 @@ protected SearchContext createSearchContext(IndexService indexService) { protected SearchContext createSearchContext(IndexService indexService, String... 
groupStats) { BigArrays bigArrays = indexService.getBigArrays(); - final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), new String[0], 0L, null); + final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), 0L, null); return new TestSearchContext(bigArrays, indexService) { @Override public List groupStats() { @@ -258,30 +258,12 @@ public void testSlowLogHasJsonFields() throws IOException { assertThat(p.getValueFor("took"), equalTo("10nanos")); assertThat(p.getValueFor("took_millis"), equalTo("0")); assertThat(p.getValueFor("total_hits"), equalTo("-1")); - assertThat(p.getValueFor("types"), equalTo("[]")); assertThat(p.getValueFor("stats"), equalTo("[]")); assertThat(p.getValueFor("search_type"), Matchers.nullValue()); assertThat(p.getValueFor("total_shards"), equalTo("1")); assertThat(p.getValueFor("source"), equalTo("{\\\"query\\\":{\\\"match_all\\\":{\\\"boost\\\":1.0}}}")); } - public void testSlowLogWithTypes() throws IOException { - IndexService index = createIndex("foo"); - SearchContext searchContext = searchContextWithSourceAndTask(index); - searchContext.getQueryShardContext().setTypes("type1", "type2"); - SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10); - - assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\", \\\"type2\\\"]")); - - searchContext.getQueryShardContext().setTypes("type1"); - p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10); - assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\"]")); - - searchContext.getQueryShardContext().setTypes(); - p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10); - assertThat(p.getValueFor("types"), equalTo("[]")); - } - public void testSlowLogsWithStats() throws IOException { IndexService index = createIndex("foo"); SearchContext searchContext = createSearchContext(index, "group1"); diff --git 
a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java index df3e8deb6d90a..a6bc87d53c004 100644 --- a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java @@ -14,6 +14,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicyFactory; @@ -84,6 +85,18 @@ public void testCreateEngineConfigFromFactoryMultipleCodecServiceIllegalStateExc expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void testCreateEngineConfigFromFactoryMultipleCodecServiceAndFactoryIllegalStateException() { + IndexMetadata meta = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new FooEnginePlugin(), new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); + } + public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolicyFactoryIllegalStateException() { IndexMetadata meta = IndexMetadata.builder("test") .settings(settings(Version.CURRENT)) @@ -96,6 +109,43 @@ public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolic expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void testCreateCodecServiceFromFactory() { + IndexMetadata meta = 
IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + EngineConfigFactory factory = new EngineConfigFactory(plugins, indexSettings); + EngineConfig config = factory.newEngineConfig( + null, + null, + indexSettings, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + TimeValue.timeValueMinutes(5), + null, + null, + null, + null, + null, + () -> new RetentionLeases(0, 0, Collections.emptyList()), + null, + null + ); + assertNotNull(config.getCodec()); + } + private static class FooEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { @@ -125,6 +175,18 @@ public Optional getCustomCodecService(IndexSettings indexSettings) } } + private static class BakEnginePlugin extends Plugin implements EnginePlugin { + @Override + public Optional getEngineFactory(final IndexSettings indexSettings) { + return Optional.empty(); + } + + @Override + public Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.of(config -> new CodecService(config.getMapperService(), LogManager.getLogger(getClass()))); + } + } + private static class BazEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 928dad8685cfe..33f09a3e67db8 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -82,6 +82,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; 
import org.apache.lucene.util.SetOnce; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -101,6 +103,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.logging.Loggers; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; @@ -121,16 +124,10 @@ import org.opensearch.index.VersionType; import org.opensearch.index.codec.CodecService; import org.opensearch.index.fieldvisitor.FieldsVisitor; -import org.opensearch.index.mapper.ContentPath; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.Mapper.BuilderContext; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.Mapping; -import org.opensearch.index.mapper.MetadataFieldMapper; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParseContext.Document; import org.opensearch.index.mapper.ParsedDocument; -import org.opensearch.index.mapper.RootObjectMapper; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.Uid; @@ -154,8 +151,6 @@ import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; -import org.hamcrest.MatcherAssert; -import org.hamcrest.Matchers; import java.io.Closeable; import java.io.IOException; @@ -194,17 +189,7 @@ import java.util.stream.Collectors; import java.util.stream.LongStream; -import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; -import static 
org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; -import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.Matchers.contains; @@ -230,6 +215,15 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public class InternalEngineTests extends EngineTestCase { @@ -303,7 +297,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { if (operation.origin() == PRIMARY) { assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired()); } - 
engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get())); assertTrue("safe access should be required", engine.isSafeAccessRequired()); engine.refresh("test"); assertTrue("safe access should be required", engine.isSafeAccessRequired()); @@ -477,7 +471,7 @@ public void testSegments() throws Exception { liveDocsFirstSegment.remove(idToUpdate); ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get())); deletes++; } else { engine.index(indexForDoc(doc)); @@ -608,7 +602,6 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { initialEngine.index(operation); } else { final Engine.Delete operation = new Engine.Delete( - "test", "1", newUid(doc), UNASSIGNED_SEQ_NO, @@ -878,7 +871,7 @@ public void testSimpleOperations() throws Exception { searchResult.close(); // now delete - engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); // its not deleted yet searchResult = engine.acquireSearcher("test"); @@ -1025,7 +1018,7 @@ public void testSearchResultRelease() throws Exception { // don't release the search result yet... 
// delete, refresh and do a new search, it should not be there - engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); engine.refresh("test"); Engine.Searcher updateSearchResult = engine.acquireSearcher("test"); MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0)); @@ -1086,9 +1079,9 @@ public void testSyncTranslogConcurrently() throws Exception { final CheckedRunnable checker = () -> { assertThat(engine.getTranslogStats().getUncommittedOperations(), equalTo(0)); assertThat(engine.getLastSyncedGlobalCheckpoint(), equalTo(globalCheckpoint.get())); - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { SequenceNumbers.CommitInfo commitInfo = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommit.getIndexCommit().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ); assertThat(commitInfo.localCheckpoint, equalTo(engine.getProcessedLocalCheckpoint())); } @@ -1305,7 +1298,7 @@ public void testVersionedUpdate() throws IOException { Engine.Index create = new Engine.Index(newUid(doc), primaryTerm.get(), doc, Versions.MATCH_DELETED); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); - try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.type(), doc.id(), create.uid()), searcherFactory)) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), create.uid()), searcherFactory)) { assertEquals(1, get.version()); } @@ -1313,7 +1306,7 @@ public void testVersionedUpdate() throws IOException { Engine.IndexResult update_1_result = engine.index(update_1); assertThat(update_1_result.getVersion(), equalTo(2L)); - try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.type(), doc.id(), create.uid()), 
searcherFactory)) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), create.uid()), searcherFactory)) { assertEquals(2, get.version()); } @@ -1321,7 +1314,7 @@ public void testVersionedUpdate() throws IOException { Engine.IndexResult update_2_result = engine.index(update_2); assertThat(update_2_result.getVersion(), equalTo(3L)); - try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.type(), doc.id(), create.uid()), searcherFactory)) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), create.uid()), searcherFactory)) { assertEquals(3, get.version()); } @@ -1341,8 +1334,7 @@ public void testGetIfSeqNoIfPrimaryTerm() throws IOException { } try ( Engine.GetResult get = engine.get( - new Engine.Get(true, true, doc.type(), doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo()) - .setIfPrimaryTerm(primaryTerm.get()), + new Engine.Get(true, true, doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo()).setIfPrimaryTerm(primaryTerm.get()), searcherFactory ) ) { @@ -1352,7 +1344,7 @@ public void testGetIfSeqNoIfPrimaryTerm() throws IOException { expectThrows( VersionConflictEngineException.class, () -> engine.get( - new Engine.Get(true, false, doc.type(), doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo() + 1) + new Engine.Get(true, false, doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo() + 1) .setIfPrimaryTerm(primaryTerm.get()), searcherFactory ) @@ -1361,7 +1353,7 @@ public void testGetIfSeqNoIfPrimaryTerm() throws IOException { expectThrows( VersionConflictEngineException.class, () -> engine.get( - new Engine.Get(true, false, doc.type(), doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo()) + new Engine.Get(true, false, doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo()) .setIfPrimaryTerm(primaryTerm.get() + 1), searcherFactory ) @@ -1370,7 +1362,7 @@ public void testGetIfSeqNoIfPrimaryTerm() throws IOException { final VersionConflictEngineException 
versionConflictEngineException = expectThrows( VersionConflictEngineException.class, () -> engine.get( - new Engine.Get(true, false, doc.type(), doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo() + 1) + new Engine.Get(true, false, doc.id(), create.uid()).setIfSeqNo(indexResult.getSeqNo() + 1) .setIfPrimaryTerm(primaryTerm.get() + 1), searcherFactory ) @@ -1471,7 +1463,6 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService("test"); final Set liveDocs = new HashSet<>(); try ( Store store = createStore(); @@ -1488,7 +1479,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocs.remove(doc.id()); } if (randomBoolean()) { @@ -1505,12 +1496,12 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { globalCheckpoint.set(randomLongBetween(0, localCheckpoint)); engine.syncTranslog(); final long safeCommitCheckpoint; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - safeCommitCheckpoint = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + safeCommitCheckpoint = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); } 
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - Map ops = readAllOperationsInLucene(engine, mapperService).stream() + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + Map ops = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); for (long seqno = 0; seqno <= localCheckpoint; seqno++) { long minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitCheckpoint + 1); @@ -1537,8 +1528,8 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { engine.syncTranslog(); engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocs.size())); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + assertThat(readAllOperationsInLucene(engine), hasSize(liveDocs.size())); } } @@ -1550,7 +1541,6 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService("test"); final boolean omitSourceAllTheTime = randomBoolean(); final Set liveDocs = new HashSet<>(); final Set liveDocsWithSource = new HashSet<>(); @@ -1574,7 +1564,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime; ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null, useRecoverySource); if 
(randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocs.remove(doc.id()); liveDocsWithSource.remove(doc.id()); } @@ -1595,15 +1585,15 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc globalCheckpoint.set(randomLongBetween(0, engine.getPersistedLocalCheckpoint())); engine.syncTranslog(); final long minSeqNoToRetain; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { long safeCommitLocalCheckpoint = Long.parseLong( - safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) + wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ); minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitLocalCheckpoint + 1); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - Map ops = readAllOperationsInLucene(engine, mapperService).stream() + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + Map ops = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); for (long seqno = 0; seqno <= engine.getPersistedLocalCheckpoint(); seqno++) { String msg = "seq# [" + seqno + "], global checkpoint [" + globalCheckpoint + "], retained-ops [" + retainedExtraOps + "]"; @@ -1649,8 +1639,8 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc engine.syncTranslog(); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocsWithSource.size())); + 
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + assertThat(readAllOperationsInLucene(engine), hasSize(liveDocsWithSource.size())); } } @@ -1826,7 +1816,6 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup } else { Engine.Delete delete = (Engine.Delete) operation; return new Engine.Delete( - delete.type(), delete.id(), delete.uid(), newSeqNo, @@ -1930,7 +1919,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion 0 ); BiFunction delWithVersion = (version, delete) -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -1957,7 +1945,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion term ); TriFunction delWithSeq = (seqNo, term, delete) -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -1984,7 +1971,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion index.getIfPrimaryTerm() ); Function deleteWithCurrentTerm = delete -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -2231,7 +2217,7 @@ public void testVersioningPromotedReplica() throws IOException { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID( replicaEngine, - new Engine.Get(false, false, "type", lastReplicaOp.uid().text(), lastReplicaOp.uid()) + new Engine.Get(false, false, lastReplicaOp.uid().text(), lastReplicaOp.uid()) ).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { final TotalHitCountCollector collector = new TotalHitCountCollector(); @@ -2297,7 +2283,7 @@ class OpAndVersion { throw new AssertionError(e); } for (int op = 0; op < opsPerThread; op++) { - try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.type(), doc.id(), uidTerm), searcherFactory)) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), 
uidTerm), searcherFactory)) { FieldsVisitor visitor = new FieldsVisitor(true); get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); @@ -2353,7 +2339,7 @@ class OpAndVersion { assertTrue(op.added + " should not exist", exists); } - try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.type(), doc.id(), uidTerm), searcherFactory)) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), uidTerm), searcherFactory)) { FieldsVisitor visitor = new FieldsVisitor(true); get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); @@ -2371,7 +2357,7 @@ public void testBasicCreatedFlag() throws IOException { indexResult = engine.index(index); assertFalse(indexResult.isCreated()); - engine.delete(new Engine.Delete("doc", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); index = indexForDoc(doc); indexResult = engine.index(index); @@ -2503,7 +2489,6 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException { // we have some docs indexed, so delete one of them id = randomFrom(indexedIds); final Engine.Delete delete = new Engine.Delete( - "test", id, newUid(id), UNASSIGNED_SEQ_NO, @@ -2616,7 +2601,7 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException { // this test writes documents to the engine while concurrently flushing/commit // and ensuring that the commit points contain the correct sequence number data public void testConcurrentWritesAndCommits() throws Exception { - List commits = new ArrayList<>(); + List> commits = new ArrayList<>(); try ( Store store = createStore(); InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null)) @@ 
-2671,8 +2656,8 @@ public void testConcurrentWritesAndCommits() throws Exception { // now, verify all the commits have the correct docs according to the user commit data long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; - for (Engine.IndexCommitRef commitRef : commits) { - final IndexCommit commit = commitRef.getIndexCommit(); + for (GatedCloseable wrappedCommit : commits) { + final IndexCommit commit = wrappedCommit.get(); Map userData = commit.getUserData(); long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ? Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) @@ -2817,7 +2802,6 @@ public void testEnableGcDeletes() throws Exception { // Delete document we just added: engine.delete( new Engine.Delete( - "test", "1", newUid(doc), UNASSIGNED_SEQ_NO, @@ -2845,7 +2829,6 @@ public void testEnableGcDeletes() throws Exception { // Delete non-existent document engine.delete( new Engine.Delete( - "test", "2", newUid("2"), UNASSIGNED_SEQ_NO, @@ -2860,7 +2843,7 @@ public void testEnableGcDeletes() throws Exception { ); // Get should not find the document (we never indexed uid=2): - getResult = engine.get(new Engine.Get(true, false, "type", "2", newUid("2")), searcherFactory); + getResult = engine.get(new Engine.Get(true, false, "2", newUid("2")), searcherFactory); assertThat(getResult.exists(), equalTo(false)); // Try to index uid=1 with a too-old version, should fail: @@ -3234,15 +3217,6 @@ public void testSkipTranslogReplay() throws IOException { } } - private Mapping dynamicUpdate() { - BuilderContext context = new BuilderContext( - Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(), - new ContentPath() - ); - final RootObjectMapper root = new RootObjectMapper.Builder("some_type").build(context); - return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap()); - } - private Path[] filterExtraFSFiles(Path[] 
files) { List paths = new ArrayList<>(); for (Path p : files) { @@ -3278,7 +3252,6 @@ public void testTranslogReplay() throws IOException { } assertVisibleCount(engine, numDocs); translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); - translogHandler.mappingUpdate = dynamicUpdate(); engine.close(); // we need to reuse the engine config unless the parser.mappingModified won't work @@ -3288,12 +3261,6 @@ public void testTranslogReplay() throws IOException { assertVisibleCount(engine, numDocs, false); assertEquals(numDocs, translogHandler.appliedOperations()); - if (translogHandler.mappingUpdate != null) { - assertEquals(1, translogHandler.getRecoveredTypes().size()); - assertTrue(translogHandler.getRecoveredTypes().containsKey("test")); - } else { - assertEquals(0, translogHandler.getRecoveredTypes().size()); - } engine.close(); translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); @@ -3358,7 +3325,7 @@ public void testTranslogReplay() throws IOException { assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); } assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); - engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); if (randomBoolean()) { engine.close(); engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier); @@ -3405,7 +3372,7 @@ public void testRecoverFromForeignTranslog() throws IOException { primaryTerm::get, seqNo -> {} ); - translog.add(new Translog.Index("test", "SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8")))); assertEquals(generation.translogFileGeneration, translog.currentFileGeneration()); translog.close(); @@ -3689,10 +3656,7 @@ public BytesRef binaryValue() { } // now the engine is closed check we respond correctly expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1))); - expectThrows( - AlreadyClosedException.class, - () -> engine.delete(new Engine.Delete("test", "", newUid(doc1), primaryTerm.get())) - ); + expectThrows(AlreadyClosedException.class, () -> engine.delete(new Engine.Delete("", newUid(doc1), primaryTerm.get()))); expectThrows( AlreadyClosedException.class, () -> engine.noOp( @@ -3714,8 +3678,8 @@ public void testDeleteWithFatalError() throws Exception { try (Store store = createStore()) { EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { - ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(type, id); + public ParsedDocument newDeleteTombstoneDoc(String id) { + ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id); parsedDocument.rootDoc().add(new StoredField("foo", "bar") { // this is a hack to add a failure during store document which 
triggers a tragic event // and in turn fails the engine @@ -3736,10 +3700,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) { try (InternalEngine engine = createEngine(null, null, null, config)) { final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); - expectThrows( - IllegalStateException.class, - () -> engine.delete(new Engine.Delete("test", "1", newUid("1"), primaryTerm.get())) - ); + expectThrows(IllegalStateException.class, () -> engine.delete(new Engine.Delete("1", newUid("1"), primaryTerm.get()))); assertTrue(engine.isClosed.get()); assertSame(tragicException, engine.failedEngine.get()); } @@ -3839,7 +3800,6 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5)); Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5)); Engine.Delete delete = new Engine.Delete( - operation.type(), operation.id(), operation.uid(), Math.max(retry.seqNo(), operation.seqNo()) + 1, @@ -4000,7 +3960,7 @@ public void testDoubleDeliveryReplica() throws IOException { assertEquals(1, topDocs.totalHits.value); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List ops = readAllOperationsInLucene(engine, createMapperService("test")); + List ops = readAllOperationsInLucene(engine); assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L)); } } @@ -4501,7 +4461,7 @@ public void afterRefresh(boolean didRefresh) throws IOException { } public void testSequenceIDs() throws Exception { - Tuple seqID = getSequenceID(engine, new Engine.Get(false, false, "type", "2", newUid("1"))); + Tuple seqID = getSequenceID(engine, new Engine.Get(false, false, "2", newUid("1"))); // Non-existent doc returns no seqnum and no primary term assertThat(seqID.v1(), equalTo(UNASSIGNED_SEQ_NO)); assertThat(seqID.v2(), equalTo(0L)); @@ -4597,7 
+4557,6 @@ public void testLookupSeqNoByIdInLucene() throws Exception { } else { operations.add( new Engine.Delete( - doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, @@ -4806,7 +4765,6 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio operations.add(index); } else { final Engine.Delete delete = new Engine.Delete( - "test", "1", uid, sequenceNumberSupplier.getAsLong(), @@ -4854,7 +4812,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio } assertThat(engine.getProcessedLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); - try (Engine.GetResult result = engine.get(new Engine.Get(true, false, "type", "2", uid), searcherFactory)) { + try (Engine.GetResult result = engine.get(new Engine.Get(true, false, "2", uid), searcherFactory)) { assertThat(result.exists(), equalTo(exists)); } } @@ -4868,7 +4826,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio */ public void testVersionConflictIgnoreDeletedDoc() throws IOException { ParsedDocument doc = testParsedDocument("1", null, testDocument(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); - engine.delete(new Engine.Delete("test", "1", newUid("1"), 1)); + engine.delete(new Engine.Delete("1", newUid("1"), 1)); for (long seqNo : new long[] { 0, 1, randomNonNegativeLong() }) { assertDeletedVersionConflict( engine.index( @@ -4893,7 +4851,6 @@ public void testVersionConflictIgnoreDeletedDoc() throws IOException { assertDeletedVersionConflict( engine.delete( new Engine.Delete( - "test", "1", newUid("1"), UNASSIGNED_SEQ_NO, @@ -4973,8 +4930,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get())); assertThat(noOp.reason(), equalTo(reason)); if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - MapperService mapperService = createMapperService("test"); - List operationsFromLucene = readAllOperationsInLucene(noOpEngine, 
mapperService); + List operationsFromLucene = readAllOperationsInLucene(noOpEngine); assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gap and 2 manual noop. for (int i = 0; i < operationsFromLucene.size(); i++) { assertThat( @@ -4982,7 +4938,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "filling gaps")) ); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine); } } finally { IOUtils.close(noOpEngine); @@ -5050,7 +5006,7 @@ public void testRandomOperations() throws Exception { } } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List operations = readAllOperationsInLucene(engine, createMapperService("test")); + List operations = readAllOperationsInLucene(engine); assertThat(operations, hasSize(numOps)); } } @@ -5207,7 +5163,7 @@ public void testRestoreLocalHistoryFromTranslog() throws IOException { equalTo(0) ); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test")); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } @@ -5409,7 +5365,6 @@ public void testSeqNoGenerator() throws IOException { ) { final String id = "id"; final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE); - final String type = "type"; final Field versionField = new NumericDocValuesField("_version", 0); final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); final ParseContext.Document document = new ParseContext.Document(); @@ -5423,7 +5378,6 @@ public void testSeqNoGenerator() throws IOException { versionField, seqID, id, - type, "routing", Collections.singletonList(document), source, @@ -5450,7 +5404,6 @@ public void testSeqNoGenerator() throws IOException { assertThat(seqNoGenerator.get(), equalTo(seqNo + 1)); final Engine.Delete 
delete = new Engine.Delete( - type, id, new Term("_id", parsedDocument.id()), UNASSIGNED_SEQ_NO, @@ -5577,7 +5530,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, Engine.Index operation = appendOnlyPrimary(doc, false, 1); engine.index(operation); if (rarely()) { - engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get())); numDeletes.incrementAndGet(); } else { doc = testParsedDocument( @@ -5620,7 +5573,7 @@ public void testAcquireIndexCommit() throws Exception { IOUtils.close(engine, store); store = createStore(); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final Engine.IndexCommitRef snapshot; + final GatedCloseable wrappedSnapshot; final boolean closeSnapshotBeforeEngine = randomBoolean(); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { int numDocs = between(1, 20); @@ -5633,9 +5586,9 @@ public void testAcquireIndexCommit() throws Exception { final boolean flushFirst = randomBoolean(); final boolean safeCommit = randomBoolean(); if (safeCommit) { - snapshot = engine.acquireSafeIndexCommit(); + wrappedSnapshot = engine.acquireSafeIndexCommit(); } else { - snapshot = engine.acquireLastIndexCommit(flushFirst); + wrappedSnapshot = engine.acquireLastIndexCommit(flushFirst); } int moreDocs = between(1, 20); for (int i = 0; i < moreDocs; i++) { @@ -5644,13 +5597,13 @@ public void testAcquireIndexCommit() throws Exception { globalCheckpoint.set(numDocs + moreDocs - 1); engine.flush(); // check that we can still read the commit that we captured - try (IndexReader reader = DirectoryReader.open(snapshot.getIndexCommit())) { + try (IndexReader reader = DirectoryReader.open(wrappedSnapshot.get())) { assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? 
numDocs : 0)); } assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2)); if (closeSnapshotBeforeEngine) { - snapshot.close(); + wrappedSnapshot.close(); // check it's clean up engine.flush(true, true); assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(1)); @@ -5658,7 +5611,7 @@ public void testAcquireIndexCommit() throws Exception { } if (closeSnapshotBeforeEngine == false) { - snapshot.close(); // shouldn't throw AlreadyClosedException + wrappedSnapshot.close(); // shouldn't throw AlreadyClosedException } } @@ -5722,7 +5675,7 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { } engine.flush(false, randomBoolean()); int numSnapshots = between(1, 10); - final List snapshots = new ArrayList<>(); + final List> snapshots = new ArrayList<>(); for (int i = 0; i < numSnapshots; i++) { snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit. } @@ -5837,12 +5790,10 @@ public void testShouldPeriodicallyFlushAfterMerge() throws Exception { assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(2)); engine.refresh("test"); engine.forceMerge(false, 1, false, false, false, UUIDs.randomBase64UUID()); - assertBusy( - () -> { - // the merge listner runs concurrently after the force merge returned - assertThat(engine.shouldPeriodicallyFlush(), equalTo(true)); - } - ); + assertBusy(() -> { + // the merge listner runs concurrently after the force merge returned + assertThat(engine.shouldPeriodicallyFlush(), equalTo(true)); + }); engine.flush(); assertThat(engine.shouldPeriodicallyFlush(), equalTo(false)); } @@ -5917,7 +5868,7 @@ public void testStressUpdateSameDocWhileGettingIt() throws IOException, Interrup ); // first index an append only document and then delete it. 
such that we have it in the tombstones engine.index(doc); - engine.delete(new Engine.Delete(doc.type(), doc.id(), doc.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), doc.uid(), primaryTerm.get())); // now index more append only docs and refresh so we re-enabel the optimization for unsafe version map ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null); @@ -5981,22 +5932,14 @@ public void testStressUpdateSameDocWhileGettingIt() throws IOException, Interrup Thread thread = new Thread(() -> { awaitStarted.countDown(); try ( - Engine.GetResult getResult = engine.get( - new Engine.Get(true, false, doc3.type(), doc3.id(), doc3.uid()), - engine::acquireSearcher - ) + Engine.GetResult getResult = engine.get(new Engine.Get(true, false, doc3.id(), doc3.uid()), engine::acquireSearcher) ) { assertTrue(getResult.exists()); } }); thread.start(); awaitStarted.await(); - try ( - Engine.GetResult getResult = engine.get( - new Engine.Get(true, false, doc.type(), doc.id(), doc.uid()), - engine::acquireSearcher - ) - ) { + try (Engine.GetResult getResult = engine.get(new Engine.Get(true, false, doc.id(), doc.uid()), engine::acquireSearcher)) { assertFalse(getResult.exists()); } thread.join(); @@ -6105,7 +6048,7 @@ public void testTrimUnsafeCommits() throws Exception { minTranslogGen = engine.getTranslog().getMinFileGeneration(); } - store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated()); + store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); long safeMaxSeqNo = commitMaxSeqNo.stream() .filter(s -> s <= globalCheckpoint.get()) .reduce((s1, s2) -> s2) // get the last one. 
@@ -6173,11 +6116,8 @@ public void testHistoryBasedOnSource() throws Exception { engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService("test"); - List luceneOps = readAllOperationsBasedOnSource(engine, Engine.HistorySource.INDEX, mapperService); - List translogOps = readAllOperationsBasedOnSource(engine, Engine.HistorySource.TRANSLOG, mapperService); + List luceneOps = readAllOperationsBasedOnSource(engine); assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); - assertThat(translogOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); } } @@ -6242,10 +6182,9 @@ private void assertOperationHistoryInLucene(List operations) t engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService("test"); - List actualOps = readAllOperationsInLucene(engine, mapperService); + List actualOps = readAllOperationsInLucene(engine); assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } @@ -6328,17 +6267,15 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { if (rarely()) { engine.forceMerge(randomBoolean(), 1, false, false, false, UUIDs.randomBase64UUID()); } - try (Closeable ignored = engine.acquireHistoryRetentionLock(Engine.HistorySource.INDEX)) { + try (Closeable ignored = engine.acquireHistoryRetentionLock()) { long minRetainSeqNos = engine.getMinRetainedSeqNo(); assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1)); Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new); - Set actualOps = 
readAllOperationsInLucene(engine, createMapperService("test")).stream() - .map(Translog.Operation::seqNo) - .collect(Collectors.toSet()); + Set actualOps = readAllOperationsInLucene(engine).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()); assertThat(actualOps, containsInAnyOrder(expectedOps)); } - try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) { - IndexCommit safeCommit = commitRef.getIndexCommit(); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + IndexCommit safeCommit = wrappedSafeCommit.get(); if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) { lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO)); } @@ -6381,7 +6318,6 @@ public void testLastRefreshCheckpoint() throws Exception { } public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception { - final MapperService mapperService = createMapperService("test"); final long maxSeqNo = randomLongBetween(10, 50); final AtomicLong refreshCounter = new AtomicLong(); try ( @@ -6426,8 +6362,12 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { latch.await(); - Translog.Snapshot changes = engine.newChangesSnapshot("test", mapperService, min, max, true); - changes.close(); + if (randomBoolean()) { + Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true, randomBoolean()); + changes.close(); + } else { + engine.countNumberOfHistoryOperations("test", min, max); + } } }); snapshotThreads[i].start(); @@ -6496,7 +6436,7 @@ public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception { ); } } else { - Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocIds.remove(doc.id()); assertThat( "delete operations on primary must advance 
max_seq_no_of_updates", @@ -6724,7 +6664,7 @@ public void testPruneAwayDeletedButRetainedIds() throws Exception { index(engine, i); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - engine.delete(new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get())); + engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get())); engine.refresh("test"); // now we have 2 segments since we now added a tombstone plus the old segment with the delete try (Engine.Searcher searcher = engine.acquireSearcher("test")) { @@ -6925,7 +6865,7 @@ private void runTestDeleteFailure(final CheckedBiConsumer new IllegalArgumentException("fatal")); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op)); @@ -7197,7 +7137,7 @@ public void testMaxDocsOnPrimary() throws Exception { operations.add(indexForDoc(createParsedDoc(id, null))); } else { id = "not_found"; - operations.add(new Engine.Delete("_doc", id, newUid(id), primaryTerm.get())); + operations.add(new Engine.Delete(id, newUid(id), primaryTerm.get())); } } for (int i = 0; i < numDocs; i++) { diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java index ff569898b4910..e3117e179e7fa 100644 --- a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java @@ -59,7 +59,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase { @Before public void createMapper() throws Exception { - mapperService = createMapperService("test"); + mapperService = createMapperService(); } @Override @@ -74,14 +74,14 @@ public void testBasics() throws Exception { long fromSeqNo = randomNonNegativeLong(); long toSeqNo = randomLongBetween(fromSeqNo, Long.MAX_VALUE); // Empty engine - try (Translog.Snapshot snapshot = 
engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), containsString("Not all operations between from_seqno [" + fromSeqNo + "] and to_seqno [" + toSeqNo + "] found") ); } - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(0)); } int numOps = between(1, 100); @@ -92,7 +92,7 @@ public void testBasics() throws Exception { if (randomBoolean()) { engine.index(indexForDoc(doc)); } else { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); } if (rarely()) { if (randomBoolean()) { @@ -111,11 +111,11 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -128,11 +128,11 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -151,11 +151,11 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -167,11 +167,11 @@ public void 
testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -188,11 +188,11 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -204,7 +204,7 @@ public void testBasics() throws Exception { // Get snapshot via engine will auto refresh fromSeqNo = randomLongBetween(0, numOps - 1); toSeqNo = randomLongBetween(fromSeqNo, numOps - 1); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, randomBoolean())) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean(), randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(fromSeqNo, toSeqNo)); } } @@ -235,8 +235,11 @@ public void testSkipNonRootOfNestedDocuments() throws Exception { long maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo(); engine.refresh("test"); Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); - try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, mapperService, between(1, 100), 0, maxSeqNo, false)) { - assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + final boolean accurateCount = randomBoolean(); + try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false, accurateCount)) { + if (accurateCount == true) { + assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + } Translog.Operation op; while ((op = snapshot.next()) != null) { assertThat(op.toString(), op.primaryTerm(), equalTo(seqNoToTerm.get(op.seqNo()))); @@ -264,7 +267,7 @@ public 
void testUpdateAndReadChangesConcurrently() throws Exception { if (randomBoolean()) { op = new Engine.Index(newUid(doc), primaryTerm.get(), doc); } else { - op = new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()); + op = new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()); } } else { if (randomBoolean()) { @@ -311,7 +314,7 @@ void pullOperations(InternalEngine follower) throws IOException { long fromSeqNo = followerCheckpoint + 1; long batchSize = randomLongBetween(0, 100); long toSeqNo = Math.min(fromSeqNo + batchSize, leaderCheckpoint); - try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { translogHandler.run(follower, snapshot); } } @@ -327,7 +330,7 @@ public void run() { .getProcessedCheckpoint()) { pullOperations(engine); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); // have to verify without source since we are randomly testing without _source List docsWithoutSourceOnFollower = getDocIds(engine, true).stream() .map(d -> new DocIdSeqNoAndSource(d.getId(), null, d.getSeqNo(), d.getPrimaryTerm(), d.getVersion())) @@ -357,7 +360,7 @@ private List drainAll(Translog.Snapshot snapshot) throws IOE public void testOverFlow() throws Exception { long fromSeqNo = randomLongBetween(0, 5); long toSeqNo = randomLongBetween(Long.MAX_VALUE - 5, Long.MAX_VALUE); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), diff --git 
a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index 65b8a81b029c0..a015443979527 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -33,6 +33,7 @@ package org.opensearch.index.engine; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.store.LockObtainFailedException; @@ -41,6 +42,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.TestShardRouting; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; @@ -114,8 +116,8 @@ public void testNoopAfterRegularEngine() throws IOException { final NoOpEngine noOpEngine = new NoOpEngine(noOpConfig(INDEX_SETTINGS, store, primaryTranslogDir, tracker)); assertThat(noOpEngine.getPersistedLocalCheckpoint(), equalTo(localCheckpoint)); assertThat(noOpEngine.getSeqNoStats(100L).getMaxSeqNo(), equalTo(maxSeqNo)); - try (Engine.IndexCommitRef ref = noOpEngine.acquireLastIndexCommit(false)) { - try (IndexReader reader = DirectoryReader.open(ref.getIndexCommit())) { + try (GatedCloseable wrappedCommit = noOpEngine.acquireLastIndexCommit(false)) { + try (IndexReader reader = DirectoryReader.open(wrappedCommit.get())) { assertThat(reader.numDocs(), equalTo(docs)); } } @@ -150,7 +152,7 @@ public void testNoOpEngineStats() throws Exception { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - Engine.DeleteResult result = engine.delete(new Engine.Delete("_doc", delId, newUid(delId), 
primaryTerm.get())); + Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); assertTrue(result.isFound()); engine.syncTranslog(); // advance persisted local checkpoint globalCheckpoint.set(engine.getPersistedLocalCheckpoint()); diff --git a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java index 609e972b2c026..95a2db9d74c38 100644 --- a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java @@ -112,7 +112,7 @@ public void testReadOnlyEngine() throws Exception { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - engine.delete(new Engine.Delete("test", delId, newUid(delId), primaryTerm.get())); + engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); } if (rarely()) { engine.flush(); diff --git a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java index 2854f556bf8d8..071366d7c3345 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java @@ -81,16 +81,16 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - ParsedDocument d = mapper.parse(new SourceToParse("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); + ParsedDocument d = mapper.parse(new SourceToParse("test", "1", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1.bytes, bytes1.offset, bytes1.length).endObject(); - d = mapper.parse(new SourceToParse("test", "test", "2", 
BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "2", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse(new SourceToParse("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "3", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -106,7 +106,7 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - d = mapper.parse(new SourceToParse("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "4", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); IndexFieldData indexFieldData = getForField("field"); diff --git a/server/src/test/java/org/opensearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/opensearch/index/fielddata/IndexFieldDataServiceTests.java index aeaef91fd020e..d2f9125be996a 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/opensearch/index/fielddata/IndexFieldDataServiceTests.java @@ -143,7 +143,7 @@ public void testGetForFieldRuntimeField() { searchLookupSetOnce.set(searchLookup); return (IndexFieldData.Builder) (cache, breakerService) -> null; }); - SearchLookup searchLookup = new SearchLookup(null, null, null); + SearchLookup searchLookup = new SearchLookup(null, null); ifdService.getForField(ft, "qualified", () -> searchLookup); assertSame(searchLookup, searchLookupSetOnce.get().get()); } diff --git a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java index e1a3cfb91af9c..d7cf873e133df 100644 --- 
a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -128,7 +128,7 @@ private void refreshIndex() { } private void indexDocument(String id, String sValue) { - IndexResponse response = client().prepareIndex("index", "type", id).setSource("s", sValue).get(); + IndexResponse response = client().prepareIndex("index").setId(id).setSource("s", sValue).get(); assertThat(response.status(), anyOf(equalTo(RestStatus.OK), equalTo(RestStatus.CREATED))); } } diff --git a/server/src/test/java/org/opensearch/index/get/GetResultTests.java b/server/src/test/java/org/opensearch/index/get/GetResultTests.java index 2c9cbe5edbd77..9519b83fa54b1 100644 --- a/server/src/test/java/org/opensearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/opensearch/index/get/GetResultTests.java @@ -97,7 +97,6 @@ public void testToXContent() throws IOException { { GetResult getResult = new GetResult( "index", - "type", "id", 0, 1, @@ -109,16 +108,16 @@ public void testToXContent() throws IOException { ); String output = Strings.toString(getResult); assertEquals( - "{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "{\"_index\":\"index\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + "\"metafield\":\"metavalue\",\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"}," + "\"fields\":{\"field1\":[\"value1\"]}}", output ); } { - GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); + GetResult getResult = new GetResult("index", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); String output = Strings.toString(getResult); - assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); + assertEquals("{\"_index\":\"index\",\"_id\":\"id\",\"found\":false}", 
output); } } @@ -129,7 +128,6 @@ public void testToAndFromXContentEmbedded() throws Exception { // We don't expect to retrieve the index/type/id of the GetResult because they are not rendered // by the toXContentEmbedded method. GetResult expectedGetResult = new GetResult( - null, null, null, tuple.v2().getSeqNo(), @@ -166,7 +164,6 @@ public void testToXContentEmbedded() throws IOException { GetResult getResult = new GetResult( "index", - "type", "id", 0, 1, @@ -186,7 +183,7 @@ public void testToXContentEmbedded() throws IOException { } public void testToXContentEmbeddedNotFound() throws IOException { - GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); + GetResult getResult = new GetResult("index", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false); assertEquals("{\"found\":false}", originalBytes.utf8ToString()); @@ -194,7 +191,7 @@ public void testToXContentEmbeddedNotFound() throws IOException { public void testSerializationNotFound() throws IOException { // serializes and deserializes with streamable, then prints back to xcontent - GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); + GetResult getResult = new GetResult("index", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null, null); BytesStreamOutput out = new BytesStreamOutput(); getResult.writeTo(out); @@ -222,7 +219,6 @@ public void testEqualsAndHashcode() { public static GetResult copyGetResult(GetResult getResult) { return new GetResult( getResult.getIndex(), - getResult.getType(), getResult.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -238,21 +234,6 @@ public static GetResult mutateGetResult(GetResult getResult) { List> mutations = new ArrayList<>(); mutations.add( () -> new GetResult( - randomUnicodeOfLength(15), - getResult.getType(), - getResult.getId(), - getResult.getSeqNo(), 
- getResult.getPrimaryTerm(), - getResult.getVersion(), - getResult.isExists(), - getResult.internalSourceRef(), - getResult.getFields(), - null - ) - ); - mutations.add( - () -> new GetResult( - getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getSeqNo(), @@ -267,7 +248,6 @@ public static GetResult mutateGetResult(GetResult getResult) { mutations.add( () -> new GetResult( getResult.getIndex(), - getResult.getType(), randomUnicodeOfLength(15), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -281,7 +261,6 @@ public static GetResult mutateGetResult(GetResult getResult) { mutations.add( () -> new GetResult( getResult.getIndex(), - getResult.getType(), getResult.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -295,7 +274,6 @@ public static GetResult mutateGetResult(GetResult getResult) { mutations.add( () -> new GetResult( getResult.getIndex(), - getResult.getType(), getResult.getId(), getResult.isExists() ? UNASSIGNED_SEQ_NO : getResult.getSeqNo(), getResult.isExists() ? 
0 : getResult.getPrimaryTerm(), @@ -309,7 +287,6 @@ public static GetResult mutateGetResult(GetResult getResult) { mutations.add( () -> new GetResult( getResult.getIndex(), - getResult.getType(), getResult.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -323,7 +300,6 @@ public static GetResult mutateGetResult(GetResult getResult) { mutations.add( () -> new GetResult( getResult.getIndex(), - getResult.getType(), getResult.getId(), getResult.getSeqNo(), getResult.getPrimaryTerm(), @@ -373,10 +349,9 @@ public static Tuple randomGetResult(XContentType xContentT version = -1; exists = false; } - GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, docFields, metaFields); + GetResult getResult = new GetResult(index, id, seqNo, primaryTerm, version, exists, source, docFields, metaFields); GetResult expectedGetResult = new GetResult( index, - type, id, seqNo, primaryTerm, diff --git a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java index 90112fb74832a..625cfbb81f8bc 100644 --- a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java @@ -57,7 +57,7 @@ public void testUpdateDefaultSearchAnalyzer() throws Exception { ); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - assertEquals(mapping, indexService.mapperService().documentMapper("_doc").mapping().toString()); + assertEquals(mapping, indexService.mapperService().documentMapper().mapping().toString()); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java index 
3a10b5c422578..374b7ac9a5271 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java @@ -76,7 +76,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -97,7 +96,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception { mapper.parse( new SourceToParse( "test", - "_doc", "3", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -127,7 +125,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field(timestampFieldName, "2020-12-06T11:04:05.000Z").endObject() @@ -146,7 +143,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa mapper.parse( new SourceToParse( "test", - "_doc", "2", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field("invalid-field-name", "2020-12-06T11:04:05.000Z").endObject() @@ -165,7 +161,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa mapper.parse( new SourceToParse( "test", - "_doc", "3", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java index 2e2e4990f2e65..0ad8dc3f138e0 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java @@ -363,7 +363,7 @@ ObjectMapper createObjectMapper(MapperService mapperService, String name) { ParseContext context = new ParseContext.InternalParseContext( settings, 
mapperService.documentMapperParser(), - mapperService.documentMapper("type"), + mapperService.documentMapper(), null, null ); @@ -1058,33 +1058,33 @@ public void testSimpleMapper() throws Exception { public void testParseToJsonAndParse() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json"); MapperService mapperService = createMapperService(mapping(b -> {})); - merge("person", mapperService, mapping); + merge(MapperService.SINGLE_MAPPING_NAME, mapperService, mapping); String builtMapping = mapperService.documentMapper().mappingSource().string(); // reparse it - DocumentMapper builtDocMapper = createDocumentMapper("_doc", builtMapping); + DocumentMapper builtDocMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, builtMapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json")); - Document doc = builtDocMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)).rootDoc(); + Document doc = builtDocMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(builtDocMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } public void testSimpleParser() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json"); - DocumentMapper docMapper = createDocumentMapper("person", mapping); + DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping); assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json")); - Document doc = docMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new 
SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } public void testSimpleParserNoTypeNoId() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json"); - DocumentMapper docMapper = createDocumentMapper("person", mapping); + DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1-notype-noid.json")); - Document doc = docMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } @@ -1092,12 +1092,12 @@ public void testSimpleParserNoTypeNoId() throws Exception { public void testAttributes() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json"); - DocumentMapper docMapper = createDocumentMapper("person", mapping); + DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping); assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); String builtMapping = docMapper.mappingSource().string(); - DocumentMapper builtDocMapper = createDocumentMapper("_doc", builtMapping); + DocumentMapper builtDocMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, builtMapping); assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1")); } @@ -1106,7 +1106,7 @@ public void testNoDocumentSent() throws Exception { BytesReference json = new 
BytesArray("".getBytes(StandardCharsets.UTF_8)); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)) + () -> docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)) ); assertThat(e.getMessage(), equalTo("failed to parse, document is empty")); } @@ -1472,7 +1472,7 @@ public void testTypeless() throws IOException { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("foo") .field("type", "keyword") @@ -1481,7 +1481,7 @@ public void testTypeless() throws IOException { .endObject() .endObject() ); - DocumentMapper mapper = createDocumentMapper("type", mapping); + DocumentMapper mapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping); ParsedDocument doc = mapper.parse(source(b -> b.field("foo", "1234"))); assertNull(doc.dynamicMappingsUpdate()); // no update since we reused the existing type diff --git a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java index f40ffa600ba8c..dee5db4e31253 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java @@ -366,7 +366,7 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB .field("quux", "3.2") // float detected through numeric detection .endObject() ); - ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "_doc", "id", source, builder.contentType())); + ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "id", source, builder.contentType())); Mapping update = parsedDocument.dynamicMappingsUpdate(); assertNotNull(update); assertThat(((FieldMapper) 
update.root().getMapper("foo")).fieldType().typeName(), equalTo("float")); diff --git a/server/src/test/java/org/opensearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/opensearch/index/mapper/DynamicTemplatesTests.java index f5e4ea8b2aaa8..70b58525e2772 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DynamicTemplatesTests.java @@ -76,7 +76,7 @@ public void testMatchTypeOnly() throws Exception { public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json"); - MapperService mapperService = createMapperService("person", mapping); + MapperService mapperService = createMapperService("_doc", mapping); String docJson = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(docJson)); @@ -131,7 +131,7 @@ public void testSimple() throws Exception { public void testSimpleWithXContentTraverse() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json"); - MapperService mapperService = createMapperService("person", mapping); + MapperService mapperService = createMapperService("_doc", mapping); String docJson = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(docJson)); diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java index 87e3ba253bfe3..c0900cc40abff 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java 
@@ -74,9 +74,7 @@ protected Collection> getPlugins() { public void putMappings() { assertAcked(client().admin().indices().prepareCreate("index1")); assertAcked(client().admin().indices().prepareCreate("filtered")); - assertAcked( - client().admin().indices().preparePutMapping("index1", "filtered").setType("_doc").setSource(TEST_ITEM, XContentType.JSON) - ); + assertAcked(client().admin().indices().preparePutMapping("index1", "filtered").setSource(TEST_ITEM, XContentType.JSON)); } public void testGetMappings() { @@ -95,20 +93,28 @@ public void testGetIndex() { public void testGetFieldMappings() { GetFieldMappingsResponse getFieldMappingsResponse = client().admin().indices().prepareGetFieldMappings().setFields("*").get(); - Map>> mappings = getFieldMappingsResponse.mappings(); + Map> mappings = getFieldMappingsResponse.mappings(); assertEquals(2, mappings.size()); assertFieldMappings(mappings.get("index1"), ALL_FLAT_FIELDS); assertFieldMappings(mappings.get("filtered"), FILTERED_FLAT_FIELDS); // double check that submitting the filtered mappings to an unfiltered index leads to the same get field mappings output // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); - ImmutableOpenMap filtered = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filtered.get("_doc").getSourceAsMap())); + MappingMetadata filtered = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filtered.getSourceAsMap())); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("test").setFields("*").get(); assertEquals(1, response.mappings().size()); assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS); } + public void testGetNonExistentFieldMapping() { + 
GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index1").setFields("non-existent").get(); + Map> mappings = response.mappings(); + assertEquals(1, mappings.size()); + Map fieldmapping = mappings.get("index1"); + assertEquals(0, fieldmapping.size()); + } + public void testFieldCapabilities() { List allFields = new ArrayList<>(ALL_FLAT_FIELDS); allFields.addAll(ALL_OBJECT_FIELDS); @@ -121,8 +127,8 @@ public void testFieldCapabilities() { // double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); - ImmutableOpenMap filteredMapping = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.get("_doc").getSourceAsMap())); + MappingMetadata filteredMapping = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.getSourceAsMap())); FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); // properties.value is an object field in the new mapping filteredFields.add("properties.value"); @@ -144,11 +150,10 @@ private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesR } private static void assertFieldMappings( - Map> mappings, + Map actual, Collection expectedFields ) { - assertEquals(1, mappings.size()); - Map fields = new HashMap<>(mappings.get("_doc")); + Map fields = new HashMap<>(actual); Set builtInMetadataFields = IndicesModule.getBuiltInMetadataFields(); for (String field : builtInMetadataFields) { GetFieldMappingsResponse.FieldMappingMetadata fieldMappingMetadata = fields.remove(field); @@ -161,12 +166,12 @@ private static void 
assertFieldMappings( assertEquals("Some unexpected fields were returned: " + fields.keySet(), 0, fields.size()); } - private void assertExpectedMappings(ImmutableOpenMap> mappings) { + private void assertExpectedMappings(ImmutableOpenMap mappings) { assertEquals(2, mappings.size()); assertNotFiltered(mappings.get("index1")); - ImmutableOpenMap filtered = mappings.get("filtered"); + MappingMetadata filtered = mappings.get("filtered"); assertFiltered(filtered); - assertMappingsAreValid(filtered.get("_doc").getSourceAsMap()); + assertMappingsAreValid(filtered.getSourceAsMap()); } private void assertMappingsAreValid(Map sourceAsMap) { @@ -179,9 +184,7 @@ private void assertMappingsAreValid(Map sourceAsMap) { } @SuppressWarnings("unchecked") - private static void assertFiltered(ImmutableOpenMap mappings) { - assertEquals(1, mappings.size()); - MappingMetadata mappingMetadata = mappings.get("_doc"); + private static void assertFiltered(MappingMetadata mappingMetadata) { assertNotNull(mappingMetadata); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(4, sourceAsMap.size()); @@ -226,9 +229,7 @@ private static void assertFiltered(ImmutableOpenMap map } @SuppressWarnings("unchecked") - private static void assertNotFiltered(ImmutableOpenMap mappings) { - assertEquals(1, mappings.size()); - MappingMetadata mappingMetadata = mappings.get("_doc"); + private static void assertNotFiltered(MappingMetadata mappingMetadata) { assertNotNull(mappingMetadata); Map sourceAsMap = mappingMetadata.getSourceAsMap(); assertEquals(4, sourceAsMap.size()); diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java index 117d66f50a178..639de9d314641 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java @@ -110,7 +110,6 @@ public void 
testInjectIntoDocDuringParsing() throws Exception { ParsedDocument doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field("a", "100").startObject("b").field("c", 42).endObject().endObject() @@ -148,7 +147,6 @@ public void testExplicitEnabled() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON @@ -179,7 +177,6 @@ public void testDisabled() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java index 856b3b2cd2099..9c9c0440231de 100644 --- a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -47,17 +47,16 @@ public class GenericStoreDynamicTemplateTests extends OpenSearchSingleNodeTestCa public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json"); IndexService index = createIndex("test"); - client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping("test").setSource(mapping, XContentType.JSON).get(); MapperService mapperService = index.mapperService(); byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - 
.parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") - .setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON) .get(); Document doc = parsedDoc.rootDoc(); diff --git a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java index 718e945042218..e897abad405d5 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java @@ -72,7 +72,6 @@ public void testIncludeInObjectNotAllowed() throws Exception { docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_id", "1").endObject()), XContentType.JSON @@ -91,7 +90,7 @@ public void testDefaults() throws IOException { Settings indexSettings = Settings.EMPTY; MapperService mapperService = createIndex("test", indexSettings).mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME); assertEquals(1, fields.length); assertEquals(IndexOptions.DOCS, fields[0].fieldType().indexOptions()); diff --git a/server/src/test/java/org/opensearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/IdFieldTypeTests.java index f755fbcda54cc..8a37a72ab7be4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IdFieldTypeTests.java +++ 
b/server/src/test/java/org/opensearch/index/mapper/IdFieldTypeTests.java @@ -42,9 +42,6 @@ import org.opensearch.test.OpenSearchTestCase; import org.mockito.Mockito; -import java.util.Collection; -import java.util.Collections; - public class IdFieldTypeTests extends OpenSearchTestCase { public void testRangeQuery() { @@ -70,16 +67,12 @@ public void testTermsQuery() { Mockito.when(context.indexVersionCreated()).thenReturn(indexSettings.getAsVersion(IndexMetadata.SETTING_VERSION_CREATED, null)); MapperService mapperService = Mockito.mock(MapperService.class); - Collection types = Collections.emptySet(); - Mockito.when(context.queryTypes()).thenReturn(types); Mockito.when(context.getMapperService()).thenReturn(mapperService); MappedFieldType ft = new IdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); - types = Collections.singleton("type"); - Mockito.when(context.queryTypes()).thenReturn(types); query = ft.termQuery("id", context); assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); } diff --git a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java index b27eb54fbfe59..c4225cb576550 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java @@ -63,7 +63,6 @@ public void testDefaultDisabledIndexMapper() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java index 33306c5842674..07fa602272b3d 100644 --- 
a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java @@ -79,7 +79,6 @@ public void testStoreCidr() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", entry.getKey()).endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldTypeTests.java index 98edd61e2fc1f..c2c6293eec4bd 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldTypeTests.java @@ -46,7 +46,7 @@ public void testFetchSourceValue() throws IOException { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); - RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true).build(context); + RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true, Version.V_EMPTY).build(context); Map range = org.opensearch.common.collect.Map.of("gte", "2001:db8:0:0:0:0:2:1"); assertEquals( Collections.singletonList(org.opensearch.common.collect.Map.of("gte", "2001:db8::2:1")), diff --git a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java index 3905ac0969850..7e00a463124f1 100644 --- a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java @@ -50,20 +50,20 @@ public void testMergeMultiField() throws Exception { String mapping = 
copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping2.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -72,14 +72,14 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); 
assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping3.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -89,7 +89,7 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping4.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -103,20 +103,20 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, 
XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade1.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -125,14 +125,14 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade2.json"); - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(mapperService.fieldType("name").isSearchable()); @@ -143,7 +143,11 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/multifield/merge/upgrade3.json"); try { - mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(mapping), + 
MapperService.MergeReason.MAPPING_UPDATE + ); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Cannot update parameter [index] from [true] to [false]")); diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index 376a5b6360d00..b58c0bf69c298 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -33,7 +33,6 @@ package org.opensearch.index.mapper; import org.apache.lucene.analysis.TokenStream; -import org.opensearch.ExceptionsHelper; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesReference; @@ -41,7 +40,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.Environment; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; @@ -66,7 +64,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -80,32 +77,6 @@ protected Collection> getPlugins() { return Arrays.asList(InternalSettingsPlugin.class, ReloadableFilterPlugin.class); } - public void testTypeNameStartsWithIllegalDot() { - String index = "test-index"; - String type = ".test-type"; - String field = "field"; - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); } - ); - assertTrue(e.getMessage(), e.getMessage().contains("mapping type 
name [.test-type] must not start with a '.'")); - } - - public void testTypeNameTooLong() { - String index = "text-index"; - String field = "field"; - String type = new String(new char[256]).replace("\0", "a"); - - MapperException e = expectThrows( - MapperException.class, - () -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); } - ); - assertTrue( - e.getMessage(), - e.getMessage().contains("mapping type name [" + type + "] is too long; limit is length 255 but was [256]") - ); - } - public void testTypeValidation() { InvalidTypeNameException e = expectThrows(InvalidTypeNameException.class, () -> MapperService.validateTypeName("_type")); assertEquals("mapping type name [_type] can't start with '_' unless it is called [_doc]", e.getMessage()); @@ -116,34 +87,6 @@ public void testTypeValidation() { MapperService.validateTypeName("_doc"); // no exception } - public void testIndexIntoDefaultMapping() throws Throwable { - // 1. test implicit index creation - ExecutionException e = expectThrows( - ExecutionException.class, - () -> client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "1").setSource("{}", XContentType.JSON).execute().get() - ); - Throwable throwable = ExceptionsHelper.unwrapCause(e.getCause()); - if (throwable instanceof IllegalArgumentException) { - assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage()); - } else { - throw e; - } - - // 2. 
already existing index - IndexService indexService = createIndex("index2"); - e = expectThrows( - ExecutionException.class, - () -> { client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "2").setSource().execute().get(); } - ); - throwable = ExceptionsHelper.unwrapCause(e.getCause()); - if (throwable instanceof IllegalArgumentException) { - assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage()); - } else { - throw e; - } - assertNull(indexService.mapperService().documentMapper(MapperService.DEFAULT_MAPPING)); - } - public void testPreflightUpdateDoesNotChangeMapping() throws Throwable { final MapperService mapperService = createIndex("test1").mapperService(); final CompressedXContent mapping = createMappingSpecifyingNumberOfFields(1); @@ -231,7 +174,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{}}", XContentType.JSON) + .setMapping("{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}") .setSettings(Settings.builder().put("index.number_of_shards", 4).put("index.routing_partition_size", 2)) .execute() .actionGet(); @@ -243,7 +186,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .setMapping("{\"_routing\":{\"required\":true}}") .setSettings(Settings.builder().put("index.number_of_shards", 4).put("index.routing_partition_size", 2)) .execute() .actionGet() @@ -359,16 +302,6 @@ public void testTotalFieldsLimitWithFieldAlias() throws Throwable { assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] has been exceeded", e.getMessage()); } - public void testDefaultMappingIsRejectedOn7() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject()); - MapperService mapperService = 
createIndex("test").mapperService(); - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> mapperService.merge("_default_", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE) - ); - assertEquals(MapperService.DEFAULT_MAPPING_ERROR_MESSAGE, e.getMessage()); - } - public void testFieldNameLengthLimit() throws Throwable { int maxFieldNameLength = randomIntBetween(25, 30); String testString = new String(new char[maxFieldNameLength + 1]).replace("\0", "a"); diff --git a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java index c2c96737506d9..4027cf20baba8 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java @@ -72,10 +72,11 @@ private void testMultiField(String mapping) throws Exception { IndexService indexService = createIndex("test"); MapperService mapperService = indexService.mapperService(); - indexService.mapperService().merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + indexService.mapperService() + .merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -139,7 +140,7 @@ private void testMultiField(String mapping) throws Exception { public void testBuildThenParse() throws Exception { IndexService indexService = createIndex("test"); DocumentMapper builderDocMapper = new 
DocumentMapper.Builder( - new RootObjectMapper.Builder("person").add( + new RootObjectMapper.Builder(MapperService.SINGLE_MAPPING_NAME).add( new TextFieldMapper.Builder("name", createDefaultIndexAnalyzers()).store(true) .addMultiField(new TextFieldMapper.Builder("indexed", createDefaultIndexAnalyzers()).index(true)) .addMultiField(new TextFieldMapper.Builder("not_indexed", createDefaultIndexAnalyzers()).index(false).store(true)) @@ -151,10 +152,10 @@ public void testBuildThenParse() throws Exception { // reparse it DocumentMapper docMapper = indexService.mapperService() .documentMapperParser() - .parse("person", new CompressedXContent(builtMapping)); + .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); diff --git a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java index c456e3ee11e3e..fe3ce5da6c90a 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java @@ -86,7 +86,6 @@ public void testEmptyNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").nullField("nested1").endObject()), XContentType.JSON @@ -98,7 +97,6 @@ public void testEmptyNested() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( 
XContentFactory.jsonBuilder().startObject().field("field", "value").startArray("nested").endArray().endObject() @@ -135,7 +133,6 @@ public void testSingleNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -161,7 +158,6 @@ public void testSingleNested() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -230,7 +226,6 @@ public void testMultiNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -325,7 +320,6 @@ public void testMultiObjectAndNested1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -421,7 +415,6 @@ public void testMultiObjectAndNested2() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -516,7 +509,6 @@ public void testMultiRootAndNested1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -611,7 +603,6 @@ public void testMultipleLevelsIncludeRoot1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -681,7 +672,6 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -766,7 +756,6 @@ public void testMultipleLevelsIncludeRootWithMerge() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - 
MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -822,7 +811,6 @@ public void testNestedArrayStrict() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -975,7 +963,7 @@ public void testLimitNestedDocsDefaultSettings() throws Exception { docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" @@ -1020,7 +1008,7 @@ public void testLimitNestedDocs() throws Exception { docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -1037,7 +1025,7 @@ public void testLimitNestedDocs() throws Exception { docBuilder2.endArray(); } docBuilder2.endObject(); - SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); + SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" @@ -1089,7 +1077,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception { docBuilder.endArray(); } docBuilder.endObject(); - 
SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -1111,7 +1099,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception { } docBuilder2.endObject(); - SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); + SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" @@ -1148,7 +1136,7 @@ public void testMergeNestedMappings() throws IOException { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -1162,14 +1150,14 @@ public void testMergeNestedMappings() throws IOException { // cannot update `include_in_parent` dynamically MapperException e1 = expectThrows( MapperException.class, - () -> mapperService.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) + () -> mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) ); assertEquals("the [include_in_parent] parameter can't be updated on a nested object mapping", e1.getMessage()); String mapping2 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -1183,7 +1171,7 @@ public void testMergeNestedMappings() throws 
IOException { // cannot update `include_in_root` dynamically MapperException e2 = expectThrows( MapperException.class, - () -> mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) + () -> mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) ); assertEquals("the [include_in_root] parameter can't be updated on a nested object mapping", e2.getMessage()); } diff --git a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java index 9085c637ef89e..95c21823bfcae 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java @@ -65,7 +65,6 @@ public void testNullValueObject() throws IOException { ParsedDocument doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().startObject("obj1").endObject().field("value1", "test1").endObject() @@ -79,7 +78,6 @@ public void testNullValueObject() throws IOException { doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("obj1").field("value1", "test1").endObject()), XContentType.JSON @@ -91,7 +89,6 @@ public void testNullValueObject() throws IOException { doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java index c82f918e55240..079475d9f3554 100644 --- a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java @@ -59,7 +59,6 @@ 
public void testDifferentInnerObjectTokenFailure() throws Exception { defaultMapper.parse( new SourceToParse( "test", - "type", "1", new BytesArray( " {\n" diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java index 9fbe349c609a2..ed5470b861811 100644 --- a/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/PathMapperTests.java @@ -46,7 +46,7 @@ public void testPathMapping() throws IOException { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/path/test-mapping.json"); DocumentMapper docMapper = createIndex("test").mapperService() .documentMapperParser() - .parse("person", new CompressedXContent(mapping)); + .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping)); // test full name assertThat(docMapper.mappers().getMapper("first1"), nullValue()); diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java index a2fa7c68f67f9..e98dc399b3b41 100644 --- a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -47,17 +47,16 @@ public class PathMatchDynamicTemplateTests extends OpenSearchSingleNodeTestCase public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json"); IndexService index = createIndex("test"); - client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); + client().admin().indices().preparePutMapping("test").setSource(mapping, XContentType.JSON).get(); MapperService mapperService = index.mapperService(); byte[] json = 
copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - .parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") - .setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON) .get(); Document doc = parsedDoc.rootDoc(); diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldMapperTests.java index 31c90d380537d..0353173e25696 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldMapperTests.java @@ -41,6 +41,7 @@ import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.index.mapper.MapperService.MergeReason; import java.io.IOException; import java.net.InetAddress; @@ -374,4 +375,12 @@ public void testIllegalFormatField() throws Exception { assertThat(e.getMessage(), containsString("Invalid format: [[test_format]]: Unknown pattern letter: t")); } + public void testUpdatesWithSameMappings() throws Exception { + for (final String type : types()) { + final DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> { b.field("store", true); })); + + final Mapping mapping = mapper.mapping(); + mapper.merge(mapping, MergeReason.MAPPING_UPDATE); + } + } } diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index aca124fcb8a93..0a01d86e76dea 100644 --- 
a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -74,7 +74,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java index c35830c5089ae..d4772f24cca93 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java @@ -536,16 +536,17 @@ public void testFetchSourceValue() throws IOException { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(context).fieldType(); + MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true, Version.V_EMPTY).build(context) + .fieldType(); Map longRange = org.opensearch.common.collect.Map.of("gte", 3.14, "lt", "42.9"); assertEquals( Collections.singletonList(org.opensearch.common.collect.Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange) ); - MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("yyyy/MM/dd||epoch_millis") - .build(context) - .fieldType(); + MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true, Version.V_EMPTY).format( + "yyyy/MM/dd||epoch_millis" + ).build(context).fieldType(); Map dateRange = org.opensearch.common.collect.Map.of("lt", "1990/12/29", "gte", 
597429487111L); assertEquals( Collections.singletonList(org.opensearch.common.collect.Map.of("lt", "1990/12/29", "gte", "1988/12/06")), @@ -557,14 +558,15 @@ public void testParseSourceValueWithFormat() throws IOException { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(context).fieldType(); + MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true, Version.V_EMPTY).build(context) + .fieldType(); Map longRange = org.opensearch.common.collect.Map.of("gte", 3.14, "lt", "42.9"); assertEquals( Collections.singletonList(org.opensearch.common.collect.Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange) ); - MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("strict_date_time") + MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true, Version.V_EMPTY).format("strict_date_time") .build(context) .fieldType(); Map dateRange = org.opensearch.common.collect.Map.of("lt", "1990-12-29T00:00:00.000Z"); diff --git a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java index a56521476c2d8..92236ad34013b 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java @@ -53,7 +53,6 @@ public void testRoutingMapper() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON, @@ -75,7 +74,6 @@ public void testIncludeInObjectNotAllowed() throws 
Exception { docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java index e37ef76ce9443..3cb16b452cbf4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java @@ -69,7 +69,6 @@ public void testNoFormat() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON @@ -82,7 +81,6 @@ public void testNoFormat() throws Exception { doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.smileBuilder().startObject().field("field", "value").endObject()), XContentType.SMILE @@ -111,7 +109,6 @@ public void testIncludes() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -156,7 +153,6 @@ public void testExcludes() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -325,8 +321,8 @@ public void testSourceObjectContainsExtraTokens() throws Exception { .parse("type", new CompressedXContent(mapping)); try { - documentMapper.parse(new SourceToParse("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object - // (invalid JSON) + documentMapper.parse(new SourceToParse("test", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object + // (invalid JSON) fail("Expected parse exception"); } catch (MapperParsingException e) 
{ assertNotNull(e.getRootCause()); diff --git a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java index f91120d4cf199..65776001381a0 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java @@ -111,7 +111,6 @@ public void testBytesAndNumericRepresentation() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java index d61c25c5ec622..89eee655ca9d4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java @@ -73,7 +73,7 @@ public void testDocValuesSingleType() throws Exception { public static void testDocValues(Function createIndex) throws IOException { MapperService mapperService = createIndex.apply("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); @@ -100,7 +100,7 @@ public void testDefaults() throws IOException { Settings indexSettings = Settings.EMPTY; MapperService mapperService = createIndex("test", indexSettings).mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); 
- ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); assertEquals(Collections.emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME))); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java index 42a37c0b2ec1a..80fd4edc6ac78 100644 --- a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java @@ -63,16 +63,13 @@ public void testConflictFieldsMapping(String fieldName) throws Exception { // test store, ... all the parameters that are not to be changed just like in other fields XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject(fieldName) .field("enabled", true) .field("store", false) .endObject() - .endObject() .endObject(); XContentBuilder mappingUpdate = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject(fieldName) .field("enabled", true) .field("store", true) @@ -82,65 +79,73 @@ public void testConflictFieldsMapping(String fieldName) throws Exception { .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate); } protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException { - IndexService indexService = createIndex("test", Settings.builder().build(), "type", mapping); - CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + IndexService indexService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping); + 
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper().mappingSource(); // simulate like in MetadataMappingService#putMapping try { indexService.mapperService() - .merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE); + .merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(BytesReference.bytes(mappingUpdate)), + MapperService.MergeReason.MAPPING_UPDATE + ); fail(); } catch (IllegalArgumentException e) { // expected } // make sure simulate flag actually worked - no mappings applied - CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper().mappingSource(); assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); } public void testConflictSameType() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping) + .mapperService(); XContentBuilder update = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "double") .endObject() .endObject() - .endObject() .endObject(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); 
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); assertThat( - ((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(), + ((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(), equalTo("long") ); } @@ -148,35 +153,36 @@ public void testConflictSameType() throws Exception { public void testConflictNewType() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping) + .mapperService(); XContentBuilder update = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "double") .endObject() .endObject() - .endObject() .endObject(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + 
MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); assertThat( - ((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(), + ((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(), equalTo("long") ); } @@ -184,25 +190,31 @@ public void testConflictNewType() throws Exception { public void testReuseMetaField() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("_id") .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService(); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(mapping)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("Field [_id] is defined more than once")); MapperParsingException e2 = expectThrows( MapperParsingException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(mapping)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e2.getMessage(), containsString("Field [_id] is defined more than once")); } @@ -211,53 +223,48 @@ public void testRejectFieldDefinedTwice() throws IOException { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") 
.field("type", "object") .endObject() .endObject() .endObject() - .endObject() ); String mapping2 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() .endObject() - .endObject() ); MapperService mapperService1 = createIndex("test1").mapperService(); - mapperService1.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE); + mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService1.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) + () -> mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping")); MapperService mapperService2 = createIndex("test2").mapperService(); - mapperService2.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); + mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); e = expectThrows( IllegalArgumentException.class, - () -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) + () -> mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping")); } public void testMappingVersion() { - createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + createIndex("test", client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME)); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); { 
final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); final PutMappingRequest request = new PutMappingRequest(); request.indices("test"); - request.type("type"); request.source("field", "type=text"); client().admin().indices().putMapping(request).actionGet(); assertThat(clusterService.state().metadata().index("test").getMappingVersion(), Matchers.equalTo(1 + previousVersion)); @@ -267,7 +274,6 @@ public void testMappingVersion() { final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); final PutMappingRequest request = new PutMappingRequest(); request.indices("test"); - request.type("type"); request.source("field", "type=text"); client().admin().indices().putMapping(request).actionGet(); // the version should be unchanged after putting the same mapping again diff --git a/server/src/test/java/org/opensearch/index/query/CombineIntervalsSourceProviderTests.java b/server/src/test/java/org/opensearch/index/query/CombineIntervalsSourceProviderTests.java index ddc985356f268..4d5380221aa82 100644 --- a/server/src/test/java/org/opensearch/index/query/CombineIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/opensearch/index/query/CombineIntervalsSourceProviderTests.java @@ -53,7 +53,7 @@ protected Combine createTestInstance() { @Override protected Combine mutateInstance(Combine instance) throws IOException { List subSources = instance.getSubSources(); - boolean ordered = instance.isOrdered(); + IntervalMode mode = instance.getMode(); int maxGaps = instance.getMaxGaps(); IntervalsSourceProvider.IntervalFilter filter = instance.getFilter(); switch (between(0, 3)) { @@ -63,7 +63,13 @@ protected Combine mutateInstance(Combine instance) throws IOException { : null; break; case 1: - ordered = !ordered; + if (mode == IntervalMode.ORDERED) { + mode = randomBoolean() ? 
IntervalMode.UNORDERED : IntervalMode.UNORDERED_NO_OVERLAP; + } else if (mode == IntervalMode.UNORDERED) { + mode = randomBoolean() ? IntervalMode.ORDERED : IntervalMode.UNORDERED_NO_OVERLAP; + } else { + mode = randomBoolean() ? IntervalMode.UNORDERED : IntervalMode.ORDERED; + } break; case 2: maxGaps++; @@ -76,7 +82,7 @@ protected Combine mutateInstance(Combine instance) throws IOException { default: throw new AssertionError("Illegal randomisation branch"); } - return new Combine(subSources, ordered, maxGaps, filter); + return new Combine(subSources, mode, maxGaps, filter); } @Override diff --git a/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java b/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java index 1723f402e6774..1dcda74308f45 100644 --- a/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java +++ b/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java @@ -45,7 +45,7 @@ public void testWhenParsedQueryIsNullNoNullPointerExceptionIsThrown() { CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder("name", "the").queryName("query-name"); // the named query parses to null; we are testing this does not cause a NullPointerException - SearchResponse response = client().prepareSearch(index).setTypes(type).setQuery(commonTermsQueryBuilder).execute().actionGet(); + SearchResponse response = client().prepareSearch(index).setQuery(commonTermsQueryBuilder).execute().actionGet(); assertNotNull(response); assertEquals(response.getHits().getHits().length, 0); diff --git a/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java index 9aac0e033dcef..05fee1c043557 100644 --- a/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/GeoShapeQueryBuilderTests.java 
@@ -51,7 +51,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.get.GetResult; -import org.opensearch.index.mapper.MapperService; import org.opensearch.test.AbstractQueryTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.test.geo.RandomShapeGenerator; @@ -70,7 +69,6 @@ public abstract class GeoShapeQueryBuilderTests extends AbstractQueryTestCase { protected static String indexedShapeId; - protected static String indexedShapeType; protected static String indexedShapePath; protected static String indexedShapeIndex; protected static String indexedShapeRouting; @@ -94,12 +92,9 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { @Override protected GetResponse executeGet(GetRequest getRequest) { - String indexedType = indexedShapeType != null ? indexedShapeType : MapperService.SINGLE_MAPPING_NAME; - assertThat(indexedShapeToReturn, notNullValue()); assertThat(indexedShapeId, notNullValue()); assertThat(getRequest.id(), equalTo(indexedShapeId)); - assertThat(getRequest.type(), equalTo(indexedType)); assertThat(getRequest.routing(), equalTo(indexedShapeRouting)); String expectedShapeIndex = indexedShapeIndex == null ? 
GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex; assertThat(getRequest.index(), equalTo(expectedShapeIndex)); @@ -116,16 +111,13 @@ protected GetResponse executeGet(GetRequest getRequest) { } catch (IOException ex) { throw new OpenSearchException("boom", ex); } - return new GetResponse( - new GetResult(indexedShapeIndex, indexedType, indexedShapeId, 0, 1, 0, true, new BytesArray(json), null, null) - ); + return new GetResponse(new GetResult(indexedShapeIndex, indexedShapeId, 0, 1, 0, true, new BytesArray(json), null, null)); } @After public void clearShapeFields() { indexedShapeToReturn = null; indexedShapeId = null; - indexedShapeType = null; indexedShapePath = null; indexedShapeIndex = null; indexedShapeRouting = null; @@ -151,10 +143,7 @@ public void testNoShape() throws IOException { } public void testNoIndexedShape() throws IOException { - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> new GeoShapeQueryBuilder(fieldName(), null, "type") - ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), null, null)); assertEquals("either shape or indexedShapeId is required", e.getMessage()); } @@ -265,11 +254,6 @@ public void testSerializationFailsUnlessFetched() throws IOException { protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder query = super.parseQuery(parser); assertThat(query, instanceOf(GeoShapeQueryBuilder.class)); - - GeoShapeQueryBuilder shapeQuery = (GeoShapeQueryBuilder) query; - if (shapeQuery.indexedShapeType() != null) { - assertWarnings(GeoShapeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } return query; } } diff --git a/server/src/test/java/org/opensearch/index/query/IdsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/IdsQueryBuilderTests.java index e92395a1b27f2..6e03acb68e204 100644 --- a/server/src/test/java/org/opensearch/index/query/IdsQueryBuilderTests.java +++ 
b/server/src/test/java/org/opensearch/index/query/IdsQueryBuilderTests.java @@ -35,16 +35,11 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; -import org.opensearch.cluster.metadata.Metadata; import org.opensearch.common.ParsingException; import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.contains; @@ -52,47 +47,19 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase { - private Set assertedWarnings = new HashSet<>(); - @Override protected IdsQueryBuilder doCreateTestQueryBuilder() { - final String type; - if (randomBoolean()) { - if (frequently()) { - type = "_doc"; - } else { - type = randomAlphaOfLengthBetween(1, 10); - } - } else if (randomBoolean()) { - type = Metadata.ALL; - } else { - type = null; - } int numberOfIds = randomIntBetween(0, 10); String[] ids = new String[numberOfIds]; for (int i = 0; i < numberOfIds; i++) { ids[i] = randomAlphaOfLengthBetween(1, 10); } - IdsQueryBuilder query; - if (type != null && randomBoolean()) { - query = new IdsQueryBuilder().types(type); - query.addIds(ids); - } else { - query = new IdsQueryBuilder(); - query.addIds(ids); - } - return query; + return new IdsQueryBuilder().addIds(ids); } @Override protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - boolean allTypes = queryBuilder.types().length == 0 || queryBuilder.types().length == 1 && "_all".equals(queryBuilder.types()[0]); - if (queryBuilder.ids().size() == 0 - // no types - || context.fieldMapper(IdFieldMapper.NAME) == null - // there are types, but disjoint from the query - || (allTypes == false - 
&& Arrays.asList(queryBuilder.types()).indexOf(context.getMapperService().documentMapper().type()) == -1)) { + if (queryBuilder.ids().size() == 0) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); } else { assertThat(query, instanceOf(TermInSetQuery.class)); @@ -100,11 +67,8 @@ protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, Qu } public void testIllegalArguments() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder().types((String[]) null)); - assertEquals("[ids] types cannot be null", e.getMessage()); - IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder(); - e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null)); assertEquals("[ids] ids cannot be null", e.getMessage()); } @@ -116,59 +80,21 @@ public void testIdsQueryWithInvalidValues() throws Exception { } public void testFromJson() throws IOException { - String json = "{\n" - + " \"ids\" : {\n" - + " \"type\" : [ \"my_type\" ],\n" - + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + String json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(json); checkGeneratedJson(json, parsed); assertThat(parsed.ids(), contains("1", "100", "4")); - assertEquals(json, "my_type", parsed.types()[0]); // check that type that is not an array and also ids that are numbers are parsed - json = "{\n" - + " \"ids\" : {\n" - + " \"type\" : \"my_type\",\n" - + " \"values\" : [ 1, 100, 4 ],\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ 1, 100, 4 ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; parsed = (IdsQueryBuilder) parseQuery(json); assertThat(parsed.ids(), contains("1", 
"100", "4")); - assertEquals(json, "my_type", parsed.types()[0]); - - // check with empty type array - json = "{\n" - + " \"ids\" : {\n" - + " \"type\" : [ ],\n" - + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; - parsed = (IdsQueryBuilder) parseQuery(json); - assertThat(parsed.ids(), contains("1", "100", "4")); - assertEquals(json, 0, parsed.types().length); - - // check without type - json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; - parsed = (IdsQueryBuilder) parseQuery(json); - assertThat(parsed.ids(), contains("1", "100", "4")); - assertEquals(json, 0, parsed.types().length); } @Override protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder query = super.parseQuery(parser); assertThat(query, instanceOf(IdsQueryBuilder.class)); - - IdsQueryBuilder idsQuery = (IdsQueryBuilder) query; - if (idsQuery.types().length > 0 && !assertedWarnings.contains(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE)) { - assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE); - assertedWarnings.add(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } return query; } diff --git a/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java index f2e42e82fc964..9cb8108818705 100644 --- a/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java @@ -78,6 +78,19 @@ public void testUnordered() throws IOException { } + public void testUnorderedNoOverlap() throws IOException { + + CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); + + IntervalsSource source = BUILDER.analyzeText(new CachingTokenFilter(ts), -1, IntervalMode.UNORDERED_NO_OVERLAP); + IntervalsSource expected = 
Intervals.unorderedNoOverlaps( + Intervals.unorderedNoOverlaps(Intervals.term("term1"), Intervals.term("term2")), + Intervals.term("term3") + ); + + assertEquals(expected, source); + } + public void testPhrase() throws IOException { CannedTokenStream ts = new CannedTokenStream(new Token("term1", 1, 2), new Token("term2", 3, 4), new Token("term3", 5, 6)); diff --git a/server/src/test/java/org/opensearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/IntervalQueryBuilderTests.java index 9dd991f200714..d7f57eef5e039 100644 --- a/server/src/test/java/org/opensearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/IntervalQueryBuilderTests.java @@ -137,10 +137,24 @@ static IntervalsSourceProvider.Disjunction createRandomDisjunction(int depth, bo static IntervalsSourceProvider.Combine createRandomCombine(int depth, boolean useScripts) { int count = randomInt(5) + 1; List subSources = createRandomSourceList(depth, useScripts, count); - boolean ordered = randomBoolean(); + IntervalMode mode; + switch (randomIntBetween(0, 2)) { + case 0: + mode = IntervalMode.ORDERED; + break; + case 1: + mode = IntervalMode.UNORDERED; + break; + case 2: + mode = IntervalMode.UNORDERED_NO_OVERLAP; + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + int maxGaps = randomInt(5) - 1; IntervalsSourceProvider.IntervalFilter filter = createRandomFilter(depth + 1, useScripts); - return new IntervalsSourceProvider.Combine(subSources, ordered, maxGaps, filter); + return new IntervalsSourceProvider.Combine(subSources, mode, maxGaps, filter); } static List createRandomSourceList(int depth, boolean useScripts, int count) { @@ -173,10 +187,23 @@ static IntervalsSourceProvider.Match createRandomMatch(int depth, boolean useScr words.add(randomRealisticUnicodeOfLengthBetween(4, 20)); } String text = String.join(" ", words); - boolean mOrdered = randomBoolean(); + IntervalMode 
mMode; + switch (randomIntBetween(0, 2)) { + case 0: + mMode = IntervalMode.ORDERED; + break; + case 1: + mMode = IntervalMode.UNORDERED; + break; + case 2: + mMode = IntervalMode.UNORDERED_NO_OVERLAP; + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } int maxMGaps = randomInt(5) - 1; String analyzer = randomFrom("simple", "keyword", "whitespace"); - return new IntervalsSourceProvider.Match(text, maxMGaps, mOrdered, analyzer, createRandomFilter(depth + 1, useScripts), useField); + return new IntervalsSourceProvider.Match(text, maxMGaps, mMode, analyzer, createRandomFilter(depth + 1, useScripts), useField); } @Override @@ -190,7 +217,7 @@ public void testCacheability() throws IOException { IntervalsSourceProvider.IntervalFilter scriptFilter = new IntervalsSourceProvider.IntervalFilter( new Script(ScriptType.INLINE, "mockscript", "1", Collections.emptyMap()) ); - IntervalsSourceProvider source = new IntervalsSourceProvider.Match("text", 0, true, "simple", scriptFilter, null); + IntervalsSourceProvider source = new IntervalsSourceProvider.Match("text", 0, IntervalMode.ORDERED, "simple", scriptFilter, null); queryBuilder = new IntervalQueryBuilder(TEXT_FIELD_NAME, source); rewriteQuery = rewriteQuery(queryBuilder, new QueryShardContext(context)); assertNotNull(rewriteQuery.toQuery(context)); @@ -243,7 +270,7 @@ public void testMatchInterval() throws IOException { + "\" : { " + " \"match\" : { " + " \"query\" : \"Hello world\"," - + " \"ordered\" : true }," + + " \"mode\" : \"ordered\" }," + " \"boost\" : 2 } } }"; builder = (IntervalQueryBuilder) parseQuery(json); @@ -253,6 +280,90 @@ public void testMatchInterval() throws IOException { ); assertEquals(expected, builder.toQuery(createShardContext())); + json = "{ \"intervals\" : " + + "{ \"" + + TEXT_FIELD_NAME + + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"mode\" : \"unordered_no_overlap\" }," + + " \"boost\" : 2 } } }"; + + builder = 
(IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery( + new IntervalQuery(TEXT_FIELD_NAME, Intervals.unorderedNoOverlaps(Intervals.term("hello"), Intervals.term("world"))), + 2 + ); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + + TEXT_FIELD_NAME + + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello world\"," + + " \"mode\" : \"unordered_no_overlap\"," + + " \"max_gaps\" : 11 }," + + " \"boost\" : 2 } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery( + new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.maxgaps(11, Intervals.unorderedNoOverlaps(Intervals.term("hello"), Intervals.term("world"))) + ), + 2 + ); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + + TEXT_FIELD_NAME + + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello Open Search\"," + + " \"mode\" : \"unordered_no_overlap\" }," + + " \"boost\" : 3 } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery( + new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.unorderedNoOverlaps( + Intervals.unorderedNoOverlaps(Intervals.term("hello"), Intervals.term("open")), + Intervals.term("search") + ) + ), + 3 + ); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : " + + "{ \"" + + TEXT_FIELD_NAME + + "\" : { " + + " \"match\" : { " + + " \"query\" : \"Hello Open Search\"," + + " \"mode\" : \"unordered_no_overlap\"," + + " \"max_gaps\": 12 }," + + " \"boost\" : 3 } } }"; + + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery( + new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.maxgaps( + 12, + Intervals.unorderedNoOverlaps( + Intervals.maxgaps(12, Intervals.unorderedNoOverlaps(Intervals.term("hello"), Intervals.term("open"))), + Intervals.term("search") + ) + ) + ), + 3 + ); + assertEquals(expected, 
builder.toQuery(createShardContext())); + json = "{ \"intervals\" : " + "{ \"" + TEXT_FIELD_NAME @@ -261,7 +372,7 @@ public void testMatchInterval() throws IOException { + " \"query\" : \"Hello world\"," + " \"max_gaps\" : 10," + " \"analyzer\" : \"whitespace\"," - + " \"ordered\" : true } } } }"; + + " \"mode\" : \"ordered\" } } } }"; builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( @@ -281,7 +392,7 @@ public void testMatchInterval() throws IOException { + " \"use_field\" : \"" + MASKED_FIELD + "\"," - + " \"ordered\" : true } } } }"; + + " \"mode\" : \"ordered\" } } } }"; builder = (IntervalQueryBuilder) parseQuery(json); expected = new IntervalQuery( @@ -298,7 +409,7 @@ public void testMatchInterval() throws IOException { + " \"query\" : \"Hello world\"," + " \"max_gaps\" : 10," + " \"analyzer\" : \"whitespace\"," - + " \"ordered\" : true," + + " \"mode\" : \"ordered\"," + " \"filter\" : {" + " \"containing\" : {" + " \"match\" : { \"query\" : \"blah\" } } } } } } }"; @@ -350,11 +461,11 @@ public void testCombineInterval() throws IOException { + TEXT_FIELD_NAME + "\": {" + " \"all_of\" : {" - + " \"ordered\" : true," + + " \"mode\" : \"ordered\"," + " \"intervals\" : [" + " { \"match\" : { \"query\" : \"one\" } }," + " { \"all_of\" : { " - + " \"ordered\" : false," + + " \"mode\" : \"unordered\"," + " \"intervals\" : [" + " { \"match\" : { \"query\" : \"two\" } }," + " { \"match\" : { \"query\" : \"three\" } } ] } } ]," @@ -381,6 +492,52 @@ public void testCombineInterval() throws IOException { ); assertEquals(expected, builder.toQuery(createShardContext())); + json = "{ \"intervals\" : { \"" + + TEXT_FIELD_NAME + + "\": {" + + " \"all_of\" : {" + + " \"mode\" : \"unordered_no_overlap\"," + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"one\" } }," + + " { \"match\" : { \"query\" : \"two\" } } ]," + + " \"max_gaps\" : 30 }," + + " \"boost\" : 1.5 } } }"; + builder = (IntervalQueryBuilder) parseQuery(json); + expected = 
new BoostQuery( + new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.maxgaps(30, Intervals.unorderedNoOverlaps(Intervals.term("one"), Intervals.term("two"))) + ), + 1.5f + ); + assertEquals(expected, builder.toQuery(createShardContext())); + + json = "{ \"intervals\" : { \"" + + TEXT_FIELD_NAME + + "\": {" + + " \"all_of\" : {" + + " \"mode\" : \"unordered_no_overlap\"," + + " \"intervals\" : [" + + " { \"match\" : { \"query\" : \"one\" } }," + + " { \"match\" : { \"query\" : \"two\" } }," + + " { \"match\" : { \"query\" : \"three\" } } ]," + + " \"max_gaps\" : 3 }," + + " \"boost\" : 3.5 } } }"; + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new BoostQuery( + new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.maxgaps( + 3, + Intervals.unorderedNoOverlaps( + Intervals.maxgaps(3, Intervals.unorderedNoOverlaps(Intervals.term("one"), Intervals.term("two"))), + Intervals.term("three") + ) + ) + ), + 3.5f + ); + assertEquals(expected, builder.toQuery(createShardContext())); } public void testCombineDisjunctionInterval() throws IOException { @@ -389,7 +546,7 @@ public void testCombineDisjunctionInterval() throws IOException { + TEXT_FIELD_NAME + "\": { " + " \"all_of\" : {" - + " \"ordered\" : true," + + " \"mode\" : \"ordered\"," + " \"intervals\" : [" + " { \"match\" : { \"query\" : \"atmosphere\" } }," + " { \"any_of\" : {" @@ -416,7 +573,7 @@ public void testCombineDisjunctionInterval() throws IOException { } public void testNonIndexedFields() throws IOException { - IntervalsSourceProvider provider = new IntervalsSourceProvider.Match("test", 0, true, null, null, null); + IntervalsSourceProvider provider = new IntervalsSourceProvider.Match("test", 0, IntervalMode.ORDERED, null, null, null); IntervalQueryBuilder b = new IntervalQueryBuilder("no_such_field", provider); assertThat(b.toQuery(createShardContext()), equalTo(new MatchNoDocsQuery())); @@ -446,7 +603,7 @@ public void testNonIndexedFields() throws IOException { + " \"use_field\" : \"" + 
NO_POSITIONS_FIELD + "\"," - + " \"ordered\" : true } } } }"; + + " \"mode\" : \"ordered\" } } } }"; e = expectThrows(IllegalArgumentException.class, () -> { IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); @@ -689,7 +846,11 @@ public void testWildcard() throws IOException { } private static IntervalsSource buildRegexpSource(String pattern, int flags, Integer maxExpansions) { - final RegExp regexp = new RegExp(pattern, flags); + return buildRegexpSource(pattern, flags, 0, maxExpansions); + } + + private static IntervalsSource buildRegexpSource(String pattern, int flags, int matchFlags, Integer maxExpansions) { + final RegExp regexp = new RegExp(pattern, flags, matchFlags); CompiledAutomaton automaton = new CompiledAutomaton(regexp.toAutomaton()); if (maxExpansions != null) { @@ -765,6 +926,15 @@ public void testRegexp() throws IOException { expected = new IntervalQuery(TEXT_FIELD_NAME, buildRegexpSource("te.m", DEFAULT_FLAGS, 500)); assertEquals(expected, builder.toQuery(createShardContext())); + String regexp_case_insensitive_json = "{ \"intervals\" : { \"" + + TEXT_FIELD_NAME + + "\": { " + + "\"regexp\" : { \"pattern\" : \"TE.M\", \"case_insensitive\" : true } } } }"; + + builder = (IntervalQueryBuilder) parseQuery(regexp_case_insensitive_json); + expected = new IntervalQuery(TEXT_FIELD_NAME, buildRegexpSource("TE.M", DEFAULT_FLAGS, RegExp.ASCII_CASE_INSENSITIVE, null)); + assertEquals(expected, builder.toQuery(createShardContext())); + String regexp_neg_max_expand_json = "{ \"intervals\" : { \"" + TEXT_FIELD_NAME + "\": { " diff --git a/server/src/test/java/org/opensearch/index/query/MatchIntervalsSourceProviderTests.java b/server/src/test/java/org/opensearch/index/query/MatchIntervalsSourceProviderTests.java index 2725df7dd06d9..3f9d075d72532 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchIntervalsSourceProviderTests.java @@ 
-53,7 +53,7 @@ protected Match createTestInstance() { protected Match mutateInstance(Match instance) throws IOException { String query = instance.getQuery(); int maxGaps = instance.getMaxGaps(); - boolean isOrdered = instance.isOrdered(); + IntervalMode mode = instance.getMode(); String analyzer = instance.getAnalyzer(); IntervalsSourceProvider.IntervalFilter filter = instance.getFilter(); String useField = instance.getUseField(); @@ -65,7 +65,13 @@ protected Match mutateInstance(Match instance) throws IOException { maxGaps++; break; case 2: - isOrdered = !isOrdered; + if (mode == IntervalMode.ORDERED) { + mode = randomBoolean() ? IntervalMode.UNORDERED : IntervalMode.UNORDERED_NO_OVERLAP; + } else if (mode == IntervalMode.UNORDERED) { + mode = randomBoolean() ? IntervalMode.ORDERED : IntervalMode.UNORDERED_NO_OVERLAP; + } else { + mode = randomBoolean() ? IntervalMode.UNORDERED : IntervalMode.ORDERED; + } break; case 3: analyzer = analyzer == null ? randomAlphaOfLength(5) : null; @@ -81,7 +87,7 @@ protected Match mutateInstance(Match instance) throws IOException { default: throw new AssertionError("Illegal randomisation branch"); } - return new Match(query, maxGaps, isOrdered, analyzer, filter, useField); + return new Match(query, maxGaps, mode, analyzer, filter, useField); } @Override diff --git a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index c4aba907f4f40..bf42aca156805 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -390,13 +390,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( - "_doc", - "string_boost", - "type=text", - "string_no_pos", - "type=text,index_options=docs" - ) + 
PutMappingRequest.buildFromSimplifiedDef("string_boost", "type=text", "string_no_pos", "type=text,index_options=docs") ) ), MapperService.MergeReason.MAPPING_UPDATE diff --git a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java index 0b75e9eb32314..2061378c3f54f 100644 --- a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -112,14 +112,7 @@ private static String[] randomStringFields() { private Item generateRandomItem() { String index = randomBoolean() ? getIndex().getName() : null; // indexed item or artificial document - Item item; - - if (randomBoolean()) { - item = randomBoolean() ? new Item(index, randomAlphaOfLength(10)) : new Item(index, randomArtificialDoc()); - } else { - String type = "doc"; - item = randomBoolean() ? new Item(index, type, randomAlphaOfLength(10)) : new Item(index, type, randomArtificialDoc()); - } + Item item = randomBoolean() ? 
new Item(index, randomAlphaOfLength(10)) : new Item(index, randomArtificialDoc()); // if no field is specified MLT uses all mapped fields for this item if (randomBoolean()) { @@ -247,7 +240,7 @@ protected MultiTermVectorsResponse executeMultiTermVectors(MultiTermVectorsReque MultiTermVectorsItemResponse[] responses = new MultiTermVectorsItemResponse[mtvRequest.size()]; int i = 0; for (TermVectorsRequest request : mtvRequest) { - TermVectorsResponse response = new TermVectorsResponse(request.index(), request.type(), request.id()); + TermVectorsResponse response = new TermVectorsResponse(request.index(), request.id()); response.setExists(true); Fields generatedFields; if (request.doc() != null) { @@ -449,11 +442,9 @@ public void testFromJson() throws IOException { + " \"fields\" : [ \"title\", \"description\" ],\n" + " \"like\" : [ \"and potentially some more text here as well\", {\n" + " \"_index\" : \"imdb\",\n" - + " \"_type\" : \"movies\",\n" + " \"_id\" : \"1\"\n" + " }, {\n" + " \"_index\" : \"imdb\",\n" - + " \"_type\" : \"movies\",\n" + " \"_id\" : \"2\"\n" + " } ],\n" + " \"max_query_terms\" : 12,\n" @@ -481,12 +472,6 @@ public void testFromJson() throws IOException { protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder query = super.parseQuery(parser); assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class)); - - MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query; - if (mltQuery.isTypeless() == false && !assertedWarnings.contains(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE)) { - assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE); - assertedWarnings.add(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } return query; } diff --git a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java index 8cc24a658025a..b95d9f8d36ad8 100644 --- 
a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java @@ -78,7 +78,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", TEXT_FIELD_NAME, "type=text", INT_FIELD_NAME, diff --git a/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java b/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java index f9db638c8245b..b803e7b5686dc 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java @@ -209,14 +209,10 @@ public void testIndexSortedOnField() { } public void testFielddataLookupSelfReference() { - QueryShardContext queryShardContext = createQueryShardContext( - "uuid", - null, - (field, leafLookup, docId) -> { - // simulate a runtime field that depends on itself e.g. field: doc['field'] - return leafLookup.doc().get(field).toString(); - } - ); + QueryShardContext queryShardContext = createQueryShardContext("uuid", null, (field, leafLookup, docId) -> { + // simulate a runtime field that depends on itself e.g. 
field: doc['field'] + return leafLookup.doc().get(field).toString(); + }); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field", queryShardContext)); assertEquals("Cyclic dependency detected while resolving runtime fields: field -> field", iae.getMessage()); } diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index d2aa512a43ed3..8eaeaa17f7bb5 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -1075,7 +1075,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", "foo", "type=text", "_field_names", "enabled=false")) + Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=false")) ), MapperService.MergeReason.MAPPING_UPDATE ); @@ -1091,9 +1091,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - Strings.toString( - PutMappingRequest.buildFromSimplifiedDef("_doc", "foo", "type=text", "_field_names", "enabled=true") - ) + Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=true")) ), MapperService.MergeReason.MAPPING_UPDATE ); diff --git a/server/src/test/java/org/opensearch/index/query/RandomQueryBuilder.java b/server/src/test/java/org/opensearch/index/query/RandomQueryBuilder.java index 8efff55a65d1a..03930edbf89d6 100644 --- a/server/src/test/java/org/opensearch/index/query/RandomQueryBuilder.java +++ b/server/src/test/java/org/opensearch/index/query/RandomQueryBuilder.java @@ -34,7 +34,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.opensearch.common.Strings; import java.util.Random; @@ -63,7 +62,7 @@ public static QueryBuilder createQuery(Random r) { case 2: // We make sure this query has no types to avoid deprecation warnings in the // tests that use this method. - return new IdsQueryBuilderTests().createTestQueryBuilder().types(Strings.EMPTY_ARRAY); + return new IdsQueryBuilderTests().createTestQueryBuilder(); case 3: return createMultiTermQuery(r); default: diff --git a/server/src/test/java/org/opensearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/opensearch/index/query/RegexpIntervalsSourceProviderTests.java index ba97bdddf52ff..bf6809e5cb446 100644 --- a/server/src/test/java/org/opensearch/index/query/RegexpIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/opensearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -32,7 +32,8 @@ static Regexp createRandomRegexp() { randomAlphaOfLengthBetween(0, 3) + (randomBoolean() ? ".*?" : "." + randomAlphaOfLength(4)) + randomAlphaOfLengthBetween(0, 5), randomBoolean() ? RegexpFlag.resolveValue(randomFrom(FLAGS)) : RegexpFlag.ALL.value(), randomBoolean() ? randomAlphaOfLength(10) : null, - randomBoolean() ? randomIntBetween(-1, Integer.MAX_VALUE) : null + randomBoolean() ? randomIntBetween(-1, Integer.MAX_VALUE) : null, + randomBoolean() ); } @@ -42,7 +43,9 @@ protected Regexp mutateInstance(Regexp instance) throws IOException { int flags = instance.getFlags(); String useField = instance.getUseField(); Integer maxExpansions = instance.getMaxExpansions(); - int ran = between(0, 3); + boolean caseInsensitive = instance.isCaseInsensitive(); + + int ran = between(0, 4); switch (ran) { case 0: pattern += randomBoolean() ? ".*?" : randomAlphaOfLength(5); @@ -56,10 +59,13 @@ protected Regexp mutateInstance(Regexp instance) throws IOException { case 3: maxExpansions = maxExpansions == null ? 
randomIntBetween(1, Integer.MAX_VALUE) : null; break; + case 4: + caseInsensitive = !caseInsensitive; + break; default: throw new AssertionError("Illegal randomisation branch"); } - return new Regexp(pattern, flags, useField, maxExpansions); + return new Regexp(pattern, flags, useField, maxExpansions, caseInsensitive); } @Override diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index 9e6c496c1a0ca..ea93d7a65b951 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -119,9 +119,7 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { private TermsLookup randomTermsLookup() { // Randomly choose between a typeless terms lookup and one with an explicit type to make sure we are - TermsLookup lookup = maybeIncludeType && randomBoolean() - ? new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath) - : new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); + TermsLookup lookup = new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); // testing both cases. lookup.routing(randomBoolean() ? 
randomAlphaOfLength(10) : null); return lookup; @@ -241,9 +239,7 @@ public GetResponse executeGet(GetRequest getRequest) { } catch (IOException ex) { throw new OpenSearchException("boom", ex); } - return new GetResponse( - new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, 1, 0, true, new BytesArray(json), null, null) - ); + return new GetResponse(new GetResult(getRequest.index(), getRequest.id(), 0, 1, 0, true, new BytesArray(json), null, null)); } public void testNumeric() throws IOException { @@ -381,13 +377,6 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { try { QueryBuilder query = super.parseQuery(parser); assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); - - TermsQueryBuilder termsQuery = (TermsQueryBuilder) query; - String deprecationWarning = "Deprecated field [type] used, this field is unused and will be removed entirely"; - if (termsQuery.isTypeless() == false && !assertedWarnings.contains(deprecationWarning)) { - assertWarnings(deprecationWarning); - assertedWarnings.add(deprecationWarning); - } return query; } finally { diff --git a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java index f81938ff9df9b..c6cd667338303 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java @@ -93,7 +93,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws String docType = "_doc"; mapperService.merge( docType, - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, "m_s_m", "type=long"))), + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("m_s_m", "type=long"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git 
a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java deleted file mode 100644 index f4f5662755a54..0000000000000 --- a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.test.AbstractQueryTestCase; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; - -public class TypeQueryBuilderTests extends AbstractQueryTestCase { - - @Override - protected TypeQueryBuilder doCreateTestQueryBuilder() { - return new TypeQueryBuilder("_doc"); - } - - @Override - protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - if (createShardContext().getMapperService().documentMapper(queryBuilder.type()) == null) { - assertEquals(new MatchNoDocsQuery(), query); - } else { - assertThat(query, equalTo(Queries.newNonNestedFilter(context.indexVersionCreated()))); - } - } - - public void testIllegalArgument() { - expectThrows(IllegalArgumentException.class, () -> new TypeQueryBuilder((String) null)); - } - - public void testFromJson() throws IOException { - String json = "{\n" + " \"type\" : {\n" + " \"value\" : \"my_type\",\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; - - TypeQueryBuilder parsed = (TypeQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - - assertEquals(json, "my_type", parsed.type()); - } - - @Override - public void testToQuery() throws IOException { - super.testToQuery(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testMustRewrite() throws IOException { - super.testMustRewrite(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testCacheability() throws IOException { - super.testCacheability(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java 
b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java index 62557da2adb62..2bfcec1bf786c 100644 --- a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java @@ -88,6 +88,7 @@ import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; @@ -283,7 +284,8 @@ protected boolean sortRequiresCustomComparator() { 0, GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null ); private static final ScoreFunction EXP_DECAY_FUNCTION = new DecayFunctionBuilder.NumericFieldDataScoreFunction( 0, @@ -292,7 +294,8 @@ protected boolean sortRequiresCustomComparator() { 0, ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null ); private static final ScoreFunction LIN_DECAY_FUNCTION = new DecayFunctionBuilder.NumericFieldDataScoreFunction( 0, @@ -301,7 +304,48 @@ protected boolean sortRequiresCustomComparator() { 0, LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, new IndexNumericFieldDataStub(), - MultiValueMode.MAX + MultiValueMode.MAX, + null + ); + + private static final ScoreFunction RANDOM_SCORE_FUNCTION_NAMED = new RandomScoreFunction(0, 0, new IndexFieldDataStub(), "func1"); + private static final ScoreFunction FIELD_VALUE_FACTOR_FUNCTION_NAMED = new FieldValueFactorFunction( + "test", + 1, + FieldValueFactorFunction.Modifier.LN, + 1.0, + null, + "func1" + ); + private static final ScoreFunction GAUSS_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, + 
new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" + ); + private static final ScoreFunction EXP_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, + new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" + ); + private static final ScoreFunction LIN_DECAY_FUNCTION_NAMED = new DecayFunctionBuilder.NumericFieldDataScoreFunction( + 0, + 1, + 0.1, + 0, + LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, + new IndexNumericFieldDataStub(), + MultiValueMode.MAX, + "func1" ); private static final ScoreFunction WEIGHT_FACTOR_FUNCTION = new WeightFactorFunction(4); private static final String TEXT = "The way out is through."; @@ -383,6 +427,58 @@ public void testExplainFunctionScoreQuery() throws IOException { assertThat(functionExplanation.getDetails()[0].getDetails()[1].getDetails().length, equalTo(0)); } + public void testExplainFunctionScoreQueryWithName() throws IOException { + Explanation functionExplanation = getFunctionScoreExplanation(searcher, RANDOM_SCORE_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "random score function (seed: 0, field: test, _name: func1)"); + assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, FIELD_VALUE_FACTOR_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "field value function(_name: func1): ln(doc['test'].value?:1.0 * factor=1.0)"); + assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, GAUSS_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = exp(-0.5*pow(MAX[Math.max(Math.abs" + + "(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 
0)],2.0)/0.21714724095162594, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, EXP_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = exp(- MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)] * 2.3025850929940455, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, LIN_DECAY_FUNCTION_NAMED); + checkFunctionScoreExplanation(functionExplanation, "Function for field test:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + equalTo( + "0.1 = max(0.0, ((1.1111111111111112" + + " - MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)])/1.1111111111111112, _name: func1)\n" + ) + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + + functionExplanation = getFunctionScoreExplanation(searcher, new WeightFactorFunction(4, RANDOM_SCORE_FUNCTION_NAMED)); + checkFunctionScoreExplanation(functionExplanation, "product of:"); + assertThat( + functionExplanation.getDetails()[0].getDetails()[0].toString(), + endsWith("random score function (seed: 0, field: test, _name: func1)\n") + ); + assertThat(functionExplanation.getDetails()[0].getDetails()[1].toString(), equalTo("4.0 = weight\n")); + assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0)); + assertThat(functionExplanation.getDetails()[0].getDetails()[1].getDetails().length, equalTo(0)); + } + public Explanation getFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction scoreFunction) throws IOException { FunctionScoreQuery 
functionScoreQuery = new FunctionScoreQuery(new TermQuery(TERM), scoreFunction, CombineFunction.AVG, 0.0f, 100); Weight weight = searcher.createWeight(searcher.rewrite(functionScoreQuery), org.apache.lucene.search.ScoreMode.COMPLETE, 1f); diff --git a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java index eaf283ae56f38..6ee3f3c0bced4 100644 --- a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java +++ b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java @@ -81,14 +81,7 @@ public void testRountTrip() throws IOException { private List randomIndexingFailures() { return usually() ? emptyList() - : singletonList( - new Failure( - randomSimpleString(random()), - randomSimpleString(random()), - randomSimpleString(random()), - new IllegalArgumentException("test") - ) - ); + : singletonList(new Failure(randomSimpleString(random()), randomSimpleString(random()), new IllegalArgumentException("test"))); } private List randomSearchFailures() { @@ -119,7 +112,6 @@ private void assertResponseEquals(BulkByScrollResponse expected, BulkByScrollRes Failure expectedFailure = expected.getBulkFailures().get(i); Failure actualFailure = actual.getBulkFailures().get(i); assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); - assertEquals(expectedFailure.getType(), actualFailure.getType()); assertEquals(expectedFailure.getId(), actualFailure.getId()); assertEquals(expectedFailure.getMessage(), actualFailure.getMessage()); assertEquals(expectedFailure.getStatus(), actualFailure.getStatus()); @@ -150,7 +142,6 @@ public static void assertEqualBulkResponse( Failure expectedFailure = expected.getBulkFailures().get(i); Failure actualFailure = actual.getBulkFailures().get(i); assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); - assertEquals(expectedFailure.getType(), actualFailure.getType()); 
assertEquals(expectedFailure.getId(), actualFailure.getId()); assertEquals(expectedFailure.getStatus(), actualFailure.getStatus()); } diff --git a/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java b/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java index f88b257d76a2f..3ff4b3ec26231 100644 --- a/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java +++ b/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java @@ -95,30 +95,6 @@ protected void extraForSliceAssertions(DeleteByQueryRequest original, DeleteByQu // No extra assertions needed } - public void testTypesGetter() { - int numTypes = between(1, 50); - String[] types = new String[numTypes]; - for (int i = 0; i < numTypes; i++) { - types[i] = randomSimpleString(random(), 1, 30); - } - SearchRequest searchRequest = new SearchRequest(); - searchRequest.types(types); - DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest); - assertArrayEquals(request.types(), types); - } - - public void testTypesSetter() { - int numTypes = between(1, 50); - String[] types = new String[numTypes]; - for (int i = 0; i < numTypes; i++) { - types[i] = randomSimpleString(random(), 1, 30); - } - SearchRequest searchRequest = new SearchRequest(); - DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest); - request.types(types); - assertArrayEquals(request.types(), types); - } - public void testValidateGivenNoQuery() { SearchRequest searchRequest = new SearchRequest(); DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(searchRequest); diff --git a/server/src/test/java/org/opensearch/index/reindex/ReindexRequestTests.java b/server/src/test/java/org/opensearch/index/reindex/ReindexRequestTests.java index ac999d34785ea..6fe277ad2751b 100644 --- a/server/src/test/java/org/opensearch/index/reindex/ReindexRequestTests.java +++ 
b/server/src/test/java/org/opensearch/index/reindex/ReindexRequestTests.java @@ -112,9 +112,6 @@ protected ReindexRequest createTestInstance() { if (randomBoolean()) { reindexRequest.setSourceBatchSize(randomInt(100)); } - if (randomBoolean()) { - reindexRequest.setDestDocType("type"); - } if (randomBoolean()) { reindexRequest.setDestOpType("create"); } @@ -160,7 +157,6 @@ protected void assertEqualInstances(ReindexRequest expectedInstance, ReindexRequ assertEquals(expectedInstance.getDestination().getPipeline(), newInstance.getDestination().getPipeline()); assertEquals(expectedInstance.getDestination().routing(), newInstance.getDestination().routing()); assertEquals(expectedInstance.getDestination().opType(), newInstance.getDestination().opType()); - assertEquals(expectedInstance.getDestination().type(), newInstance.getDestination().type()); } public void testReindexFromRemoteDoesNotSupportSearchQuery() { diff --git a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java index 5a366574fd397..d262b5abec0f3 100644 --- a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java @@ -57,6 +57,7 @@ import org.opensearch.index.engine.InternalEngineTests; import org.opensearch.index.engine.SegmentsStats; import org.opensearch.index.engine.VersionConflictEngineException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; @@ -75,7 +76,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -173,7 +173,7 @@ public void cleanFiles( 
public void testRetryAppendOnlyAfterRecovering() throws Exception { try (ReplicationGroup shards = createGroup(0)) { shards.startAll(); - final IndexRequest originalRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + final IndexRequest originalRequest = new IndexRequest(index.getName()).source("{}", XContentType.JSON); originalRequest.process(Version.CURRENT, null, index.getName()); final IndexRequest retryRequest = copyIndexRequest(originalRequest); retryRequest.onRetry(); @@ -214,7 +214,7 @@ public IndexResult index(Index op) throws IOException { }) { shards.startAll(); Thread thread = new Thread(() -> { - IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + IndexRequest indexRequest = new IndexRequest(index.getName()).source("{}", XContentType.JSON); try { shards.index(indexRequest); } catch (Exception e) { @@ -244,7 +244,7 @@ public void prepareForTranslogOperations(int totalTranslogOps, ActionListener mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); List replicas = shards.getReplicas(); IndexShard replica1 = replicas.get(0); - IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "1").source("{ \"f\": \"1\"}", XContentType.JSON); + IndexRequest indexRequest = new IndexRequest(index.getName()).id("1").source("{ \"f\": \"1\"}", XContentType.JSON); logger.info("--> isolated replica " + replica1.routingEntry()); BulkShardRequest replicationRequest = indexOnPrimary(indexRequest, shards.getPrimary()); for (int i = 1; i < replicas.size(); i++) { @@ -332,7 +329,7 @@ public void testConflictingOpsOnReplica() throws Exception { logger.info("--> promoting replica to 
primary " + replica1.routingEntry()); shards.promoteReplicaToPrimary(replica1).get(); - indexRequest = new IndexRequest(index.getName(), "type", "1").source("{ \"f\": \"2\"}", XContentType.JSON); + indexRequest = new IndexRequest(index.getName()).id("1").source("{ \"f\": \"2\"}", XContentType.JSON); shards.index(indexRequest); shards.refresh("test"); for (IndexShard shard : shards) { @@ -345,10 +342,7 @@ public void testConflictingOpsOnReplica() throws Exception { } public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm(); @@ -362,7 +356,7 @@ public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exce assertEquals(primaryPrimaryTerm, replica2.getPendingPrimaryTerm()); - IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "1").source("{ \"f\": \"1\"}", XContentType.JSON); + IndexRequest indexRequest = new IndexRequest(index.getName()).id("1").source("{ \"f\": \"1\"}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(indexRequest, replica1); CyclicBarrier barrier = new CyclicBarrier(2); @@ -398,14 +392,11 @@ public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exce } public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup 
shards = new ReplicationGroup(buildIndexMetadata(1, mappings))) { shards.startAll(); long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm(); - IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "1").source("{ \"f\": \"1\"}", XContentType.JSON); + IndexRequest indexRequest = new IndexRequest(index.getName()).id("1").source("{ \"f\": \"1\"}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(indexRequest, shards.getPrimary()); List replicas = shards.getReplicas(); @@ -485,7 +476,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { shards.startPrimary(); long primaryTerm = shards.getPrimary().getPendingPrimaryTerm(); List expectedTranslogOps = new ArrayList<>(); - BulkItemResponse indexResp = shards.index(new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON)); + BulkItemResponse indexResp = shards.index(new IndexRequest(index.getName()).id("1").source("{}", XContentType.JSON)); assertThat(indexResp.isFailed(), equalTo(true)); assertThat(indexResp.getFailure().getCause(), equalTo(indexException)); expectedTranslogOps.add(new Translog.NoOp(0, primaryTerm, indexException.toString())); @@ -508,18 +499,12 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot.totalOperations(), equalTo(0)); } } - try ( - Translog.Snapshot snapshot = shard.getHistoryOperations( - "test", - shard.indexSettings().isSoftDeleteEnabled() ? Engine.HistorySource.INDEX : Engine.HistorySource.TRANSLOG, - 0 - ) - ) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } // the failure replicated directly from the replication channel. 
- indexResp = shards.index(new IndexRequest(index.getName(), "type", "any").source("{}", XContentType.JSON)); + indexResp = shards.index(new IndexRequest(index.getName()).id("any").source("{}", XContentType.JSON)); assertThat(indexResp.getFailure().getCause(), equalTo(indexException)); Translog.NoOp noop2 = new Translog.NoOp(1, primaryTerm, indexException.toString()); expectedTranslogOps.add(noop2); @@ -532,13 +517,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(Collections.singletonList(noop2))); } } - try ( - Translog.Snapshot snapshot = shard.getHistoryOperations( - "test", - shard.indexSettings().isSoftDeleteEnabled() ? Engine.HistorySource.INDEX : Engine.HistorySource.TRANSLOG, - 0 - ) - ) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } @@ -552,9 +531,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { public void testRequestFailureReplication() throws Exception { try (ReplicationGroup shards = createGroup(0)) { shards.startAll(); - BulkItemResponse response = shards.index( - new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON).version(2) - ); + BulkItemResponse response = shards.index(new IndexRequest(index.getName()).id("1").source("{}", XContentType.JSON).version(2)); assertTrue(response.isFailed()); assertThat(response.getFailure().getCause(), instanceOf(VersionConflictEngineException.class)); shards.assertAllEqual(0); @@ -572,7 +549,7 @@ public void testRequestFailureReplication() throws Exception { shards.addReplica(); } shards.startReplicas(nReplica); - response = shards.index(new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON).version(2)); + response = shards.index(new IndexRequest(index.getName()).id("1").source("{}", 
XContentType.JSON).version(2)); assertTrue(response.isFailed()); assertThat(response.getFailure().getCause(), instanceOf(VersionConflictEngineException.class)); shards.assertAllEqual(0); @@ -605,7 +582,7 @@ public void testSeqNoCollision() throws Exception { shards.syncGlobalCheckpoint(); logger.info("--> Isolate replica1"); - IndexRequest indexDoc1 = new IndexRequest(index.getName(), "type", "d1").source("{}", XContentType.JSON); + IndexRequest indexDoc1 = new IndexRequest(index.getName()).id("d1").source("{}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(indexDoc1, shards.getPrimary()); indexOnReplica(replicationRequest, shards, replica2); @@ -625,7 +602,7 @@ public void testSeqNoCollision() throws Exception { } logger.info("--> Promote replica1 as the primary"); shards.promoteReplicaToPrimary(replica1).get(); // wait until resync completed. - shards.index(new IndexRequest(index.getName(), "type", "d2").source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id("d2").source("{}", XContentType.JSON)); final Translog.Operation op2; try (Translog.Snapshot snapshot = getTranslog(replica2).newSnapshot()) { assertThat(snapshot.totalOperations(), equalTo(1)); @@ -642,7 +619,7 @@ public void testSeqNoCollision() throws Exception { shards.promoteReplicaToPrimary(replica2).get(); logger.info("--> Recover replica3 from replica2"); recoverReplica(replica3, replica2, true); - try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, true)) { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); final List expectedOps = new ArrayList<>(initOperations); expectedOps.add(op2); @@ -675,10 +652,10 @@ public void testLateDeliveryAfterGCTriggeredOnReplica() throws Exception { updateGCDeleteCycle(replica, gcInterval); final BulkShardRequest indexRequest = indexOnPrimary( - new 
IndexRequest(index.getName(), "type", "d1").source("{}", XContentType.JSON), + new IndexRequest(index.getName()).id("d1").source("{}", XContentType.JSON), primary ); - final BulkShardRequest deleteRequest = deleteOnPrimary(new DeleteRequest(index.getName(), "type", "d1"), primary); + final BulkShardRequest deleteRequest = deleteOnPrimary(new DeleteRequest(index.getName()).id("d1"), primary); deleteOnReplica(deleteRequest, shards, replica); // delete arrives on replica first. final long deleteTimestamp = threadPool.relativeTimeInMillis(); replica.refresh("test"); @@ -712,11 +689,11 @@ public void testOutOfOrderDeliveryForAppendOnlyOperations() throws Exception { final IndexShard replica = shards.getReplicas().get(0); // Append-only request - without id final BulkShardRequest indexRequest = indexOnPrimary( - new IndexRequest(index.getName(), "type", null).source("{}", XContentType.JSON), + new IndexRequest(index.getName()).id(null).source("{}", XContentType.JSON), primary ); final String docId = Iterables.get(getShardDocUIDs(primary), 0); - final BulkShardRequest deleteRequest = deleteOnPrimary(new DeleteRequest(index.getName(), "type", docId), primary); + final BulkShardRequest deleteRequest = deleteOnPrimary(new DeleteRequest(index.getName()).id(docId), primary); deleteOnReplica(deleteRequest, shards, replica); indexOnReplica(indexRequest, shards, replica); shards.assertAllEqual(0); @@ -732,12 +709,12 @@ public void testIndexingOptimizationUsingSequenceNumbers() throws Exception { for (int i = 0; i < numDocs; i++) { String id = Integer.toString(randomIntBetween(1, 100)); if (randomBoolean()) { - group.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); + group.index(new IndexRequest(index.getName()).id(id).source("{}", XContentType.JSON)); if (liveDocs.add(id) == false) { versionLookups++; } } else { - group.delete(new DeleteRequest(index.getName(), "type", id)); + group.delete(new DeleteRequest(index.getName(), id)); 
liveDocs.remove(id); versionLookups++; } diff --git a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java index c59d4849feffb..add2ecd34e3af 100644 --- a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java @@ -75,10 +75,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.EnumSet; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; @@ -143,7 +141,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { shards.startAll(); final int docs = randomIntBetween(0, 16); for (int i = 0; i < docs; i++) { - shards.index(new IndexRequest("index", "type", Integer.toString(i)).source("{}", XContentType.JSON)); + shards.index(new IndexRequest("index").id(Integer.toString(i)).source("{}", XContentType.JSON)); } shards.flush(); @@ -159,7 +157,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { 1, randomNonNegativeLong(), false, - new SourceToParse("index", "type", "replica", new BytesArray("{}"), XContentType.JSON) + new SourceToParse("index", "replica", new BytesArray("{}"), XContentType.JSON) ); shards.promoteReplicaToPrimary(promotedReplica).get(); oldPrimary.close("demoted", randomBoolean()); @@ -173,7 +171,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { promotedReplica.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("index", "type", "primary", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("index", "primary", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, 
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -210,10 +208,7 @@ public void testRecoveryAfterPrimaryPromotion() throws Exception { final int rollbackDocs = randomIntBetween(1, 5); logger.info("--> indexing {} rollback docs", rollbackDocs); for (int i = 0; i < rollbackDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "rollback_" + i).source( - "{}", - XContentType.JSON - ); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("rollback_" + i).source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); indexOnReplica(bulkShardRequest, shards, replica); } @@ -331,7 +326,7 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { int staleDocs = scaledRandomIntBetween(1, 10); logger.info("--> indexing {} stale docs", staleDocs); for (int i = 0; i < staleDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "stale_" + i).source("{}", XContentType.JSON); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("stale_" + i).source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); indexOnReplica(bulkShardRequest, shards, replica); } @@ -361,19 +356,14 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { } public void testResyncAfterPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"_doc\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); int initialDocs = randomInt(10); for (int i = 0; i < initialDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "initial_doc_" + i).source( - "{ \"f\": 
\"normal\"}", - XContentType.JSON - ); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("initial_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); shards.index(indexRequest); } @@ -390,10 +380,8 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final int extraDocs = randomInt(5); logger.info("--> indexing {} extra docs", extraDocs); for (int i = 0; i < extraDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_doc_" + i).source( - "{ \"f\": \"normal\"}", - XContentType.JSON - ); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("extra_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); indexOnReplica(bulkShardRequest, shards, newPrimary); } @@ -401,10 +389,8 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final int extraDocsToBeTrimmed = randomIntBetween(0, 10); logger.info("--> indexing {} extra docs to be trimmed", extraDocsToBeTrimmed); for (int i = 0; i < extraDocsToBeTrimmed; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_trimmed_" + i).source( - "{ \"f\": \"trimmed\"}", - XContentType.JSON - ); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("extra_trimmed_" + i) + .source("{ \"f\": \"trimmed\"}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); // have to replicate to another replica != newPrimary one - the subject to trim indexOnReplica(bulkShardRequest, shards, justReplica); @@ -472,7 +458,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { final String id = "pending_" + i; threadPool.generic().submit(() -> { try { - shards.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id(id).source("{}", 
XContentType.JSON)); } catch (Exception e) { throw new AssertionError(e); } finally { @@ -563,7 +549,7 @@ public void indexTranslogOperations( replicaEngineFactory.latchIndexers(1); threadPool.generic().submit(() -> { try { - shards.index(new IndexRequest(index.getName(), "type", "pending").source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id("pending").source("{}", XContentType.JSON)); } catch (final Exception e) { throw new RuntimeException(e); } finally { @@ -575,7 +561,7 @@ public void indexTranslogOperations( replicaEngineFactory.awaitIndexersLatch(); // unblock indexing for the next doc replicaEngineFactory.allowIndexing(); - shards.index(new IndexRequest(index.getName(), "type", "completed").source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id("completed").source("{}", XContentType.JSON)); pendingDocActiveWithExtraDocIndexed.countDown(); } catch (final Exception e) { throw new AssertionError(e); @@ -615,7 +601,7 @@ public void indexTranslogOperations( // wait for the translog phase to complete and the recovery to block global checkpoint advancement assertBusy(() -> assertTrue(shards.getPrimary().pendingInSync())); { - shards.index(new IndexRequest(index.getName(), "type", "last").source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id("last").source("{}", XContentType.JSON)); final long expectedDocs = docs + 3L; assertThat(shards.getPrimary().getLocalCheckpoint(), equalTo(expectedDocs - 1)); // recovery is now in the process of being completed, therefore the global checkpoint can not have advanced on the primary @@ -650,7 +636,7 @@ public void testTransferMaxSeenAutoIdTimestampOnResync() throws Exception { long maxTimestampOnReplica2 = -1; List replicationRequests = new ArrayList<>(); for (int numDocs = between(1, 10), i = 0; i < numDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + final 
IndexRequest indexRequest = new IndexRequest(index.getName()).source("{}", XContentType.JSON); indexRequest.process(Version.CURRENT, null, index.getName()); final IndexRequest copyRequest; if (randomBoolean()) { @@ -708,13 +694,13 @@ public void testAddNewReplicas() throws Exception { int nextId = docId.incrementAndGet(); if (appendOnly) { String id = randomBoolean() ? Integer.toString(nextId) : null; - shards.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id(id).source("{}", XContentType.JSON)); } else if (frequently()) { String id = Integer.toString(frequently() ? nextId : between(0, nextId)); - shards.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); + shards.index(new IndexRequest(index.getName()).id(id).source("{}", XContentType.JSON)); } else { String id = Integer.toString(between(0, nextId)); - shards.delete(new DeleteRequest(index.getName(), "type", id)); + shards.delete(new DeleteRequest(index.getName()).id(id)); } if (randomInt(100) < 10) { shards.getPrimary().flush(new FlushRequest()); @@ -749,7 +735,7 @@ public void testRollbackOnPromotion() throws Exception { int inFlightOps = scaledRandomIntBetween(10, 200); for (int i = 0; i < inFlightOps; i++) { String id = "extra-" + i; - IndexRequest primaryRequest = new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON); + IndexRequest primaryRequest = new IndexRequest(index.getName()).id(id).source("{}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(primaryRequest, shards.getPrimary()); for (IndexShard replica : shards.getReplicas()) { if (randomBoolean()) { diff --git a/server/src/test/java/org/opensearch/index/replication/RetentionLeasesReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/RetentionLeasesReplicationTests.java index abb2a7632e25b..143d11bbbf13f 100644 --- 
a/server/src/test/java/org/opensearch/index/replication/RetentionLeasesReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/RetentionLeasesReplicationTests.java @@ -187,14 +187,12 @@ public void testTurnOffTranslogRetentionAfterAllShardStarted() throws Exception } group.syncGlobalCheckpoint(); group.flush(); - assertBusy( - () -> { - // we turn off the translog retention policy using the generic threadPool - for (IndexShard shard : group) { - assertThat(shard.translogStats().estimatedNumberOfOperations(), equalTo(0)); - } + assertBusy(() -> { + // we turn off the translog retention policy using the generic threadPool + for (IndexShard shard : group) { + assertThat(shard.translogStats().estimatedNumberOfOperations(), equalTo(0)); } - ); + }); } } diff --git a/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java index 50f0fa54e01c0..8fe8a13de9910 100644 --- a/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java @@ -1160,23 +1160,21 @@ public void testPeerRecoveryRetentionLeaseCreationAndRenewal() { equalTo(expectedLeaseIds) ); - assertAsTimePasses.accept( - () -> { - // Leases still don't expire - assertThat( - tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), - equalTo(expectedLeaseIds) - ); + assertAsTimePasses.accept(() -> { + // Leases still don't expire + assertThat( + tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), + equalTo(expectedLeaseIds) + ); - // Also leases are renewed before reaching half the expiry time - // noinspection OptionalGetWithoutIsPresent - assertThat( - tracker.getRetentionLeases() + " renewed before too long", - 
tracker.getRetentionLeases().leases().stream().mapToLong(RetentionLease::timestamp).min().getAsLong(), - greaterThanOrEqualTo(currentTimeMillis.get() - peerRecoveryRetentionLeaseRenewalTimeMillis) - ); - } - ); + // Also leases are renewed before reaching half the expiry time + // noinspection OptionalGetWithoutIsPresent + assertThat( + tracker.getRetentionLeases() + " renewed before too long", + tracker.getRetentionLeases().leases().stream().mapToLong(RetentionLease::timestamp).min().getAsLong(), + greaterThanOrEqualTo(currentTimeMillis.get() - peerRecoveryRetentionLeaseRenewalTimeMillis) + ); + }); IndexShardRoutingTable.Builder routingTableBuilder = new IndexShardRoutingTable.Builder(routingTable); for (ShardRouting replicaShard : routingTable.replicaShards()) { @@ -1188,17 +1186,15 @@ public void testPeerRecoveryRetentionLeaseCreationAndRenewal() { tracker.updateFromMaster(initialClusterStateVersion + randomLongBetween(1, 10), ids(activeAllocationIds), routingTable); - assertAsTimePasses.accept( - () -> { - // Leases still don't expire - assertThat( - tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), - equalTo(expectedLeaseIds) - ); - // ... and any extra peer recovery retention leases are expired immediately since the shard is fully active - tracker.addPeerRecoveryRetentionLease(randomAlphaOfLength(10), randomNonNegativeLong(), ActionListener.wrap(() -> {})); - } - ); + assertAsTimePasses.accept(() -> { + // Leases still don't expire + assertThat( + tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), + equalTo(expectedLeaseIds) + ); + // ... 
and any extra peer recovery retention leases are expired immediately since the shard is fully active + tracker.addPeerRecoveryRetentionLease(randomAlphaOfLength(10), randomNonNegativeLong(), ActionListener.wrap(() -> {})); + }); tracker.renewPeerRecoveryRetentionLeases(); assertTrue("expired extra lease", tracker.getRetentionLeases(true).v1()); diff --git a/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseBackgroundSyncActionTests.java b/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseBackgroundSyncActionTests.java index 5e9773b555f3a..54a88d57b2b69 100644 --- a/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseBackgroundSyncActionTests.java +++ b/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseBackgroundSyncActionTests.java @@ -131,21 +131,12 @@ public void testRetentionLeaseBackgroundSyncActionOnPrimary() throws Interrupted ); final CountDownLatch latch = new CountDownLatch(1); - action.shardOperationOnPrimary( - request, - indexShard, - new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener( - result -> { - // the retention leases on the shard should be persisted - verify(indexShard).persistRetentionLeases(); - // we should forward the request containing the current retention leases to the replica - assertThat(result.replicaRequest(), sameInstance(request)); - } - ), - latch - ) - ); + action.shardOperationOnPrimary(request, indexShard, new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> { + // the retention leases on the shard should be persisted + verify(indexShard).persistRetentionLeases(); + // we should forward the request containing the current retention leases to the replica + assertThat(result.replicaRequest(), sameInstance(request)); + }), latch)); latch.await(); } diff --git a/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseSyncActionTests.java b/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseSyncActionTests.java index 
d13b74f1988e2..60ee3360ff235 100644 --- a/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseSyncActionTests.java +++ b/server/src/test/java/org/opensearch/index/seqno/RetentionLeaseSyncActionTests.java @@ -126,20 +126,14 @@ public void testRetentionLeaseSyncActionOnPrimary() { ); final RetentionLeases retentionLeases = mock(RetentionLeases.class); final RetentionLeaseSyncAction.Request request = new RetentionLeaseSyncAction.Request(indexShard.shardId(), retentionLeases); - action.dispatchedShardOperationOnPrimary( - request, - indexShard, - ActionTestUtils.assertNoFailureListener( - result -> { - // the retention leases on the shard should be persisted - verify(indexShard).persistRetentionLeases(); - // we should forward the request containing the current retention leases to the replica - assertThat(result.replicaRequest(), sameInstance(request)); - // we should start with an empty replication response - assertNull(result.finalResponseIfSuccessful.getShardInfo()); - } - ) - ); + action.dispatchedShardOperationOnPrimary(request, indexShard, ActionTestUtils.assertNoFailureListener(result -> { + // the retention leases on the shard should be persisted + verify(indexShard).persistRetentionLeases(); + // we should forward the request containing the current retention leases to the replica + assertThat(result.replicaRequest(), sameInstance(request)); + // we should start with an empty replication response + assertNull(result.finalResponseIfSuccessful.getShardInfo()); + })); } public void testRetentionLeaseSyncActionOnReplica() throws Exception { diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 0c771a46ab226..3bc5218e2f61f 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.Logger; import 
org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.TermQuery; @@ -44,6 +45,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.junit.Assert; import org.opensearch.Assertions; import org.opensearch.OpenSearchException; import org.opensearch.Version; @@ -72,11 +74,11 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; -import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -89,12 +91,10 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexSettings; -import org.opensearch.index.VersionType; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.CommitStats; import org.opensearch.index.engine.DocIdSeqNoAndSource; import org.opensearch.index.engine.Engine; -import org.opensearch.index.engine.Engine.DeleteResult; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.engine.EngineConfigFactory; import org.opensearch.index.engine.EngineTestCase; @@ -108,6 +108,7 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; 
import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParsedDocument; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -141,7 +142,6 @@ import org.opensearch.test.VersionUtils; import org.opensearch.test.store.MockFSDirectoryFactory; import org.opensearch.threadpool.ThreadPool; -import org.junit.Assert; import java.io.IOException; import java.nio.charset.Charset; @@ -168,7 +168,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongFunction; @@ -179,12 +178,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; -import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -204,6 +197,12 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; +import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static 
org.opensearch.test.hamcrest.RegexMatcher.matches; /** * Simple unit-test IndexShard related operations. @@ -1736,9 +1735,7 @@ public void testRefreshMetric() throws IOException { long refreshCount = shard.refreshStats().getTotal(); indexDoc(shard, "_doc", "test"); try ( - Engine.GetResult ignored = shard.get( - new Engine.Get(true, false, "_doc", "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test"))) - ) + Engine.GetResult ignored = shard.get(new Engine.Get(true, false, "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test")))) ) { assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount + 1)); } @@ -1764,9 +1761,7 @@ public void testExternalRefreshMetric() throws IOException { final long extraInternalRefreshes = shard.routingEntry().primary() || shard.indexSettings().isSoftDeleteEnabled() == false ? 0 : 1; indexDoc(shard, "_doc", "test"); try ( - Engine.GetResult ignored = shard.get( - new Engine.Get(true, false, "_doc", "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test"))) - ) + Engine.GetResult ignored = shard.get(new Engine.Get(true, false, "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test")))) ) { assertThat(shard.refreshStats().getExternalTotal(), equalTo(externalRefreshCount)); assertThat(shard.refreshStats().getExternalTotal(), equalTo(shard.refreshStats().getTotal() - 1 - extraInternalRefreshes)); @@ -1866,7 +1861,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { assertEquals(0, postDelete.get()); assertEquals(0, postDeleteException.get()); - deleteDoc(shard, "_doc", "1"); + deleteDoc(shard, "1"); assertEquals(2, preIndex.get()); assertEquals(1, postIndexCreate.get()); @@ -1894,7 +1889,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { assertEquals(1, postDelete.get()); assertEquals(0, postDeleteException.get()); try { - deleteDoc(shard, "_doc", "1"); + deleteDoc(shard, "1"); fail(); } catch (AlreadyClosedException e) { @@ -2189,7 +2184,7 @@ public void 
testRecoverFromStoreWithOutOfOrderDelete() throws IOException { final IndexShard shard = newStartedShard(false); long primaryTerm = shard.getOperationPrimaryTerm(); shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete - shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "_doc", "id"); + shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation shard.applyIndexOperationOnReplica( 0, @@ -2197,7 +2192,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id", new BytesArray("{}"), XContentType.JSON) ); shard.applyIndexOperationOnReplica( 3, @@ -2205,7 +2200,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-3", new BytesArray("{}"), XContentType.JSON) ); // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery. 
shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); @@ -2215,7 +2210,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-2", new BytesArray("{}"), XContentType.JSON) ); shard.applyIndexOperationOnReplica( 5, @@ -2223,7 +2218,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-5", new BytesArray("{}"), XContentType.JSON) ); shard.sync(); // advance local checkpoint @@ -2363,13 +2358,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException { // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetadata()); - SourceToParse sourceToParse = new SourceToParse( - shard.shardId().getIndexName(), - "_doc", - "1", - new BytesArray("{}"), - XContentType.JSON - ); + SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), "1", new BytesArray("{}"), XContentType.JSON); otherShard.applyIndexOperationOnReplica( 1, otherShard.getOperationPrimaryTerm(), @@ -2503,7 +2492,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-0", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here. 
@@ -2513,7 +2502,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-1", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1")); @@ -2525,7 +2514,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-2", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2")); @@ -2660,11 +2649,7 @@ public void testReaderWrapperIsUsed() throws IOException { indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}"); shard.refresh("test"); - try ( - Engine.GetResult getResult = shard.get( - new Engine.Get(false, false, "_doc", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))) - ) - ) { + try (Engine.GetResult getResult = shard.get(new Engine.Get(false, false, "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) { assertTrue(getResult.exists()); assertNotNull(getResult.searcher()); } @@ -2698,9 +2683,7 @@ public void testReaderWrapperIsUsed() throws IOException { assertEquals(search.totalHits.value, 1); } try ( - Engine.GetResult getResult = newShard.get( - new Engine.Get(false, false, "_doc", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))) - ) + Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1")))) ) { assertTrue(getResult.exists()); assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader @@ -2719,7 +2702,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { 
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -2772,7 +2755,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}"); - deleteDoc(shard, "_doc", "0"); + deleteDoc(shard, "0"); indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}"); shard.refresh("test"); @@ -2859,7 +2842,7 @@ public void testTranslogRecoverySyncsTranslog() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -2904,7 +2887,7 @@ public void testRecoverFromTranslog() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE)) .build(); @@ -2916,7 +2899,6 @@ public void testRecoverFromTranslog() throws IOException { if (randomBoolean()) { operations.add( new Translog.Index( - "_doc", "1", 0, primary.getPendingPrimaryTerm(), @@ -2930,7 +2912,6 @@ public void testRecoverFromTranslog() throws IOException { // corrupt entry operations.add( new Translog.Index( - "_doc", "2", 1, primary.getPendingPrimaryTerm(), @@ -2989,7 +2970,7 @@ public 
void testShardActiveDuringPeerRecovery() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3039,7 +3020,7 @@ public void testRefreshListenersDuringPeerRecovery() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3112,7 +3093,7 @@ public void testRecoverFromLocalShard() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("source") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3139,8 +3120,8 @@ public void testRecoverFromLocalShard() throws IOException { targetShard = newShard(targetRouting); targetShard.markAsRecovering("store", new RecoveryState(targetShard.routingEntry(), localNode, null)); - BiConsumer mappingConsumer = (type, mapping) -> { - assertNull(requestedMappingUpdates.put(type, mapping)); + Consumer mappingConsumer = mapping -> { + assertNull(requestedMappingUpdates.put(MapperService.SINGLE_MAPPING_NAME, mapping)); }; final IndexShard differentIndex = newShard(new ShardId("index_2", "index_2", 0), true); @@ -3249,7 +3230,7 @@ public void testDocStats() throws Exception { ); for (final Integer i : ids) { final String id = Integer.toString(i); - deleteDoc(indexShard, "_doc", id); + deleteDoc(indexShard, id); indexDoc(indexShard, "_doc", id); } // Need to update and sync 
the global checkpoint and the retention leases for the soft-deletes retention MergePolicy. @@ -3367,7 +3348,7 @@ public void testEstimateTotalDocSize() throws Exception { // Do some updates and deletes, then recheck the correlation again. for (int i = 0; i < numDoc / 2; i++) { if (randomBoolean()) { - deleteDoc(indexShard, "doc", Integer.toString(i)); + deleteDoc(indexShard, Integer.toString(i)); } else { indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}"); } @@ -3720,7 +3701,6 @@ private Result indexOnReplicaWithGaps(final IndexShard indexShard, final int ope } SourceToParse sourceToParse = new SourceToParse( indexShard.shardId().getIndexName(), - "_doc", id, new BytesArray("{}"), XContentType.JSON @@ -3757,7 +3737,7 @@ public void testIsSearchIdle() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3807,7 +3787,7 @@ public void testScheduledRefresh() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3881,7 +3861,7 @@ public void testRefreshIsNeededWithRefreshListeners() throws IOException, Interr .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3947,7 +3927,7 @@ public void testOnCloseStats() throws IOException { public void 
testSupplyTombstoneDoc() throws Exception { IndexShard shard = newStartedShard(); String id = randomRealisticUnicodeOfLengthBetween(1, 10); - ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc("doc", id); + ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc(id); assertThat(deleteTombstone.docs(), hasSize(1)); ParseContext.Document deleteDoc = deleteTombstone.docs().get(0); assertThat( @@ -4136,11 +4116,11 @@ public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecover try { readyToSnapshotLatch.await(); shard.snapshotStoreMetadata(); - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(randomBoolean())) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + try (GatedCloseable wrappedIndexCommit = shard.acquireLastIndexCommit(randomBoolean())) { + shard.store().getMetadata(wrappedIndexCommit.get()); } - try (Engine.IndexCommitRef indexCommitRef = shard.acquireSafeIndexCommit()) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + try (GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit()) { + shard.store().getMetadata(wrappedSafeCommit.get()); } } catch (InterruptedException | IOException e) { throw new AssertionError(e); @@ -4178,14 +4158,13 @@ public void testResetEngineWithBrokenTranslog() throws Exception { updateMappings( shard, IndexMetadata.builder(shard.indexSettings.getIndexMetadata()) - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .build() ); final List operations = Stream.concat( IntStream.range(0, randomIntBetween(0, 10)) .mapToObj( n -> new Translog.Index( - "_doc", "1", 0, shard.getPendingPrimaryTerm(), @@ -4199,7 +4178,6 @@ public void testResetEngineWithBrokenTranslog() throws Exception { IntStream.range(0, randomIntBetween(1, 10)) .mapToObj( n -> new 
Translog.Index( - "_doc", "1", 0, shard.getPendingPrimaryTerm(), @@ -4302,38 +4280,6 @@ public Settings threadPoolSettings() { return Settings.builder().put(super.threadPoolSettings()).put("thread_pool.estimated_time_interval", "5ms").build(); } - public void testTypelessDelete() throws IOException { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .build(); - IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": {}}") - .settings(settings) - .primaryTerm(0, 1) - .build(); - IndexShard shard = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null); - recoverShardFromStore(shard); - Engine.IndexResult indexResult = indexDoc(shard, "some_type", "id", "{}"); - assertTrue(indexResult.isCreated()); - - DeleteResult deleteResult = shard.applyDeleteOperationOnPrimary( - Versions.MATCH_ANY, - "some_other_type", - "id", - VersionType.INTERNAL, - UNASSIGNED_SEQ_NO, - 1 - ); - assertFalse(deleteResult.isFound()); - - deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "_doc", "id", VersionType.INTERNAL, UNASSIGNED_SEQ_NO, 1); - assertTrue(deleteResult.isFound()); - - closeShards(shard); - } - public void testTypelessGet() throws IOException { Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) @@ -4341,7 +4287,7 @@ public void testTypelessGet() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -4351,19 +4297,11 @@ public void testTypelessGet() throws IOException { assertTrue(indexResult.isCreated()); 
org.opensearch.index.engine.Engine.GetResult getResult = shard.get( - new Engine.Get(true, true, "some_type", "0", new Term("_id", Uid.encodeId("0"))) + new Engine.Get(true, true, "0", new Term("_id", Uid.encodeId("0"))) ); assertTrue(getResult.exists()); getResult.close(); - getResult = shard.get(new Engine.Get(true, true, "some_other_type", "0", new Term("_id", Uid.encodeId("0")))); - assertFalse(getResult.exists()); - getResult.close(); - - getResult = shard.get(new Engine.Get(true, true, "_doc", "0", new Term("_id", Uid.encodeId("0")))); - assertTrue(getResult.exists()); - getResult.close(); - closeShards(shard); } @@ -4402,7 +4340,7 @@ public void testDoNotTrimCommitsWhenOpenReadOnlyEngine() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId.getIndexName(), "_doc", Long.toString(i), new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId.getIndexName(), Long.toString(i), new BytesArray("{}"), XContentType.JSON) ); shard.updateGlobalCheckpointOnReplica(shard.getLocalCheckpoint(), "test"); if (randomInt(100) < 10) { diff --git a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java index 63734831d0964..6ac5f96e11f34 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java @@ -161,7 +161,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { logger ); ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null); - Engine.Delete delete = new Engine.Delete("test", "1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong()); + Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong()); Engine.Index index = new Engine.Index(new Term("_id", 
Uid.encodeId(doc.id())), randomNonNegativeLong(), doc); compositeListener.postDelete(randomShardId, delete, new Engine.DeleteResult(1, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, true)); assertEquals(0, preIndex.get()); diff --git a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java index 4a9b445c12f80..631fa384de335 100644 --- a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java @@ -53,34 +53,23 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.VersionType; -import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.seqno.SequenceNumbers; -import org.opensearch.index.translog.TestTranslog; -import org.opensearch.index.translog.Translog; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskManager; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.mockito.Mockito.anyLong; -import static org.mockito.Mockito.anyString; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; public class PrimaryReplicaSyncerTests extends IndexShardTestCase { @@ -105,7 +94,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { 
shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -187,7 +176,7 @@ public void testSyncerOnClosingShard() throws Exception { shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -238,41 +227,6 @@ public void onResponse(PrimaryReplicaSyncer.ResyncTask result) { } } - public void testDoNotSendOperationsWithoutSequenceNumber() throws Exception { - IndexShard shard = spy(newStartedShard(true)); - when(shard.getLastKnownGlobalCheckpoint()).thenReturn(SequenceNumbers.UNASSIGNED_SEQ_NO); - int numOps = between(0, 20); - List operations = new ArrayList<>(); - for (int i = 0; i < numOps; i++) { - operations.add( - new Translog.Index( - "_doc", - Integer.toString(i), - randomBoolean() ? SequenceNumbers.UNASSIGNED_SEQ_NO : i, - primaryTerm, - new byte[] { 1 } - ) - ); - } - Engine.HistorySource source = shard.indexSettings.isSoftDeleteEnabled() - ? 
Engine.HistorySource.INDEX - : Engine.HistorySource.TRANSLOG; - doReturn(TestTranslog.newSnapshotFromOperations(operations)).when(shard).getHistoryOperations(anyString(), eq(source), anyLong()); - TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); - List sentOperations = new ArrayList<>(); - PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { - sentOperations.addAll(Arrays.asList(request.getOperations())); - listener.onResponse(new ResyncReplicationResponse()); - }; - PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(taskManager, syncAction); - syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 10))); - PlainActionFuture fut = new PlainActionFuture<>(); - syncer.resync(shard, fut); - fut.actionGet(); - assertThat(sentOperations, equalTo(operations.stream().filter(op -> op.seqNo() >= 0).collect(Collectors.toList()))); - closeShards(shard); - } - public void testStatusSerialization() throws IOException { PrimaryReplicaSyncer.ResyncTask.Status status = new PrimaryReplicaSyncer.ResyncTask.Status( randomAlphaOfLength(10), diff --git a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java index 593c409bf75b4..eea316d9a9370 100644 --- a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java @@ -367,7 +367,7 @@ public void testLotsOfThreads() throws Exception { } listener.assertNoError(); - Engine.Get get = new Engine.Get(false, false, "test", threadId, new Term(IdFieldMapper.NAME, threadId)); + Engine.Get get = new Engine.Get(false, false, threadId, new Term(IdFieldMapper.NAME, threadId)); try (Engine.GetResult getResult = engine.get(get, engine::acquireSearcher)) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); 
@@ -440,17 +440,7 @@ private Engine.IndexResult index(String id, String testFieldValue) throws IOExce document.add(seqID.seqNoDocValue); document.add(seqID.primaryTerm); BytesReference source = new BytesArray(new byte[] { 1 }); - ParsedDocument doc = new ParsedDocument( - versionField, - seqID, - id, - "test", - null, - Arrays.asList(document), - source, - XContentType.JSON, - null - ); + ParsedDocument doc = new ParsedDocument(versionField, seqID, id, null, Arrays.asList(document), source, XContentType.JSON, null); Engine.Index index = new Engine.Index(new Term("_id", doc.id()), engine.config().getPrimaryTermSupplier().getAsLong(), doc); return engine.index(index); } diff --git a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java index c3ce944f51588..1b8809ba04278 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java @@ -142,7 +142,7 @@ public void setup() throws IOException { final IndexMetadata.Builder metadata = IndexMetadata.builder(routing.getIndexName()) .settings(settings) .primaryTerm(0, randomIntBetween(1, 100)) - .putMapping("_doc", "{ \"properties\": {} }"); + .putMapping("{ \"properties\": {} }"); indexMetadata = metadata.build(); clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().put(indexMetadata, false).build()).build(); diff --git a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java index fc8fe408a0c6d..5dd053574268e 100644 --- a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java @@ -39,6 +39,7 @@ import org.opensearch.index.engine.Engine; import 
org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.index.get.GetResult; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.search.fetch.subphase.FetchSourceContext; @@ -58,7 +59,7 @@ public void testGetForUpdate() throws IOException { .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -66,16 +67,16 @@ public void testGetForUpdate() throws IOException { recoverShardFromStore(primary); Engine.IndexResult test = indexDoc(primary, "test", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate("test", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet = primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals(new String(testGet.source(), StandardCharsets.UTF_8), "{\"foo\" : \"bar\"}"); try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed } - Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); + Engine.IndexResult test1 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate("test", "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); 
assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); @@ -88,25 +89,19 @@ public void testGetForUpdate() throws IOException { } // now again from the reader - Engine.IndexResult test2 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); + Engine.IndexResult test2 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - testGet1 = primary.getService().getForUpdate("test", "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); final long primaryTerm = primary.getOperationPrimaryTerm(); - testGet1 = primary.getService().getForUpdate("test", "1", test2.getSeqNo(), primaryTerm); + testGet1 = primary.getService().getForUpdate("1", test2.getSeqNo(), primaryTerm); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); - expectThrows( - VersionConflictEngineException.class, - () -> primary.getService().getForUpdate("test", "1", test2.getSeqNo() + 1, primaryTerm) - ); - expectThrows( - VersionConflictEngineException.class, - () -> primary.getService().getForUpdate("test", "1", test2.getSeqNo(), primaryTerm + 1) - ); + expectThrows(VersionConflictEngineException.class, () -> primary.getService().getForUpdate("1", test2.getSeqNo() + 1, primaryTerm)); + expectThrows(VersionConflictEngineException.class, () -> primary.getService().getForUpdate("1", test2.getSeqNo(), primaryTerm + 1)); closeShards(primary); } @@ -139,7 +134,6 @@ private void runGetFromTranslogWithOptions( IndexMetadata metadata = 
IndexMetadata.builder("test") .putMapping( - "test", "{ \"properties\": { \"foo\": { \"type\": " + fieldType + ", \"store\": true }, " @@ -154,18 +148,18 @@ private void runGetFromTranslogWithOptions( .build(); IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null); recoverShardFromStore(primary); - Engine.IndexResult test = indexDoc(primary, "test", "0", docToIndex); + Engine.IndexResult test = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "0", docToIndex); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate("test", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet = primary.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals(new String(testGet.source() == null ? new byte[0] : testGet.source(), StandardCharsets.UTF_8), expectedResult); try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed } - Engine.IndexResult test1 = indexDoc(primary, "test", "1", docToIndex, XContentType.JSON, "foobar"); + Engine.IndexResult test1 = indexDoc(primary, "1", docToIndex, XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate("test", "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source() == null ? 
new byte[0] : testGet1.source(), StandardCharsets.UTF_8), expectedResult); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); @@ -177,10 +171,10 @@ private void runGetFromTranslogWithOptions( assertEquals(searcher.getIndexReader().maxDoc(), 2); } - Engine.IndexResult test2 = indexDoc(primary, "test", "2", docToIndex, XContentType.JSON, "foobar"); + Engine.IndexResult test2 = indexDoc(primary, "2", docToIndex, XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); GetResult testGet2 = primary.getService() - .get("test", "2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE); + .get("2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE); assertEquals(new String(testGet2.source() == null ? new byte[0] : testGet2.source(), StandardCharsets.UTF_8), expectedResult); assertTrue(testGet2.getFields().containsKey(RoutingFieldMapper.NAME)); assertTrue(testGet2.getFields().containsKey("foo")); @@ -193,8 +187,7 @@ private void runGetFromTranslogWithOptions( assertEquals(searcher.getIndexReader().maxDoc(), 3); } - testGet2 = primary.getService() - .get("test", "2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE); + testGet2 = primary.getService().get("2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE); assertEquals(new String(testGet2.source() == null ? 
new byte[0] : testGet2.source(), StandardCharsets.UTF_8), expectedResult); assertTrue(testGet2.getFields().containsKey(RoutingFieldMapper.NAME)); assertTrue(testGet2.getFields().containsKey("foo")); @@ -210,7 +203,7 @@ public void testTypelessGetForUpdate() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -219,13 +212,7 @@ public void testTypelessGetForUpdate() throws IOException { Engine.IndexResult indexResult = indexDoc(shard, "some_type", "0", "{\"foo\" : \"bar\"}"); assertTrue(indexResult.isCreated()); - GetResult getResult = shard.getService().getForUpdate("some_type", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); - assertTrue(getResult.isExists()); - - getResult = shard.getService().getForUpdate("some_other_type", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); - assertFalse(getResult.isExists()); - - getResult = shard.getService().getForUpdate("_doc", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult getResult = shard.getService().getForUpdate("0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertTrue(getResult.isExists()); closeShards(shard); diff --git a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java index a725590afecbb..f35911c8a3553 100644 --- a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java @@ -72,7 +72,7 @@ public void testTook() throws Exception { createIndex("test", Settings.EMPTY, "type1", mapping); ensureGreen(); - client().prepareIndex("test", "type1", "0").setSource("field", "foo 
bar").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("0").setSource("field", "foo bar").setRefreshPolicy(IMMEDIATE).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); @@ -81,7 +81,7 @@ public void testTook() throws Exception { List longs = Stream.of(abs(randomLong()), abs(randomLong())).sorted().collect(toList()); - TermVectorsRequest request = new TermVectorsRequest("test", "type1", "0"); + TermVectorsRequest request = new TermVectorsRequest("test", "0"); TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request, longs.iterator()::next); assertThat(response, notNullValue()); @@ -107,12 +107,12 @@ public void testDocFreqs() throws IOException { BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < max; i++) { bulk.add( - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") + client().prepareIndex("test").setId(Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") ); } bulk.get(); - TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true); + TermVectorsRequest request = new TermVectorsRequest("test", "0").termStatistics(true); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); @@ -148,12 +148,12 @@ public void testWithIndexedPhrases() throws IOException { BulkRequestBuilder bulk = client().prepareBulk(); for (int i = 0; i < max; i++) { bulk.add( - client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") + client().prepareIndex("test").setId(Integer.toString(i)).setSource("text", "the quick brown fox jumped over the lazy dog") ); } bulk.get(); - TermVectorsRequest request = new TermVectorsRequest("test", 
"_doc", "0").termStatistics(true); + TermVectorsRequest request = new TermVectorsRequest("test", "0").termStatistics(true); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index 5614e07d7104d..35fec28a1c798 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -358,13 +358,13 @@ public void testSimpleOperations() throws IOException { assertThat(snapshot, SnapshotMatchers.size(0)); } - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); } - addToTranslogAndList(translog, ops, new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2"))); + addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get())); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); @@ -383,7 +383,7 @@ public void testSimpleOperations() throws IOException { Translog.Delete delete = (Translog.Delete) snapshot.next(); assertNotNull(delete); - assertThat(delete.uid(), equalTo(newUid("2"))); + assertThat(delete.id(), equalTo("2")); Translog.NoOp noOp = (Translog.NoOp) snapshot.next(); assertNotNull(noOp); @@ -454,34 +454,34 @@ public void testStats() throws IOException { assertThat(stats.estimatedNumberOfOperations(), equalTo(0)); } assertThat((int) 
firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogHeader.TRANSLOG_CODEC))); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(1)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(162L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(157L)); assertThat(stats.getUncommittedOperations(), equalTo(1)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(107L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(102L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2"))); + translog.add(new Translog.Delete("2", 1, primaryTerm.get())); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(210L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(193L)); assertThat(stats.getUncommittedOperations(), equalTo(2)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(155L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(138L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("test", "3", 2, primaryTerm.get(), newUid("3"))); + translog.add(new Translog.Delete("3", 2, primaryTerm.get())); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(3)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(258L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(229L)); assertThat(stats.getUncommittedOperations(), equalTo(3)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(203L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(174L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -489,9 
+489,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(300L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(271L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(245L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(216L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -499,9 +499,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(326L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(271L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -511,7 +511,7 @@ public void testStats() throws IOException { stats.writeTo(out); final TranslogStats copy = new TranslogStats(out.bytes().streamInput()); assertThat(copy.estimatedNumberOfOperations(), equalTo(4)); - assertThat(copy.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(copy.getTranslogSizeInBytes(), equalTo(326L)); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); @@ -521,9 +521,9 @@ public void testStats() throws IOException { Strings.toString(builder), equalTo( "{\"translog\":{\"operations\":4,\"size_in_bytes\":" - + 355 + + 326 + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" - + 300 + + 271 + ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}" @@ -537,7 +537,7 @@ public void testStats() throws IOException { long lastModifiedAge = System.currentTimeMillis() - translog.getCurrent().getLastModifiedTime(); final 
TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(326L)); assertThat(stats.getUncommittedOperations(), equalTo(0)); assertThat(stats.getUncommittedSizeInBytes(), equalTo(firstOperationPosition)); assertThat(stats.getEarliestLastModifiedAge(), greaterThanOrEqualTo(lastModifiedAge)); @@ -553,7 +553,7 @@ public void testUncommittedOperations() throws Exception { int uncommittedOps = 0; int operationsInLastGen = 0; for (int i = 0; i < operations; i++) { - translog.add(new Translog.Index("test", Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index(Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 })); uncommittedOps++; operationsInLastGen++; if (rarely()) { @@ -634,7 +634,7 @@ public void testBasicSnapshot() throws IOException { assertThat(snapshot, SnapshotMatchers.size(0)); } - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot(0, Long.MAX_VALUE)) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); @@ -656,9 +656,9 @@ public void testBasicSnapshot() throws IOException { public void testReadLocation() throws IOException { ArrayList ops = new ArrayList<>(); ArrayList locs = new ArrayList<>(); - locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }))); - locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 1 }))); - locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new 
byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 1 }))); int i = 0; for (Translog.Operation op : ops) { assertEquals(op, translog.readOperation(locs.get(i++))); @@ -674,16 +674,16 @@ public void testSnapshotWithNewTranslog() throws IOException { toClose.add(snapshot); assertThat(snapshot, SnapshotMatchers.size(0)); - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); Translog.Snapshot snapshot1 = translog.newSnapshot(); toClose.add(snapshot1); - addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 })); + addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 })); assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); translog.rollGeneration(); - addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 })); + addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 })); Translog.Snapshot snapshot2 = translog.newSnapshot(); toClose.add(snapshot2); @@ -697,7 +697,7 @@ public void testSnapshotWithNewTranslog() throws IOException { public void testSnapshotOnClosedTranslog() throws IOException { assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1)))); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); translog.close(); AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot()); assertEquals(ex.getMessage(), "translog is already closed"); @@ -719,13 +719,7 @@ public 
void testRangeSnapshot() throws Exception { } List ops = new ArrayList<>(seqNos.size()); for (long seqNo : seqNos) { - Translog.Index op = new Translog.Index( - "_doc", - randomAlphaOfLength(10), - seqNo, - primaryTerm.get(), - new byte[] { randomByte() } - ); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() }); translog.add(op); ops.add(op); } @@ -786,13 +780,7 @@ private Long populateTranslogOps(boolean withMissingOps) throws IOException { Collections.shuffle(seqNos, new Random(100)); List ops = new ArrayList<>(seqNos.size()); for (long seqNo : seqNos) { - Translog.Index op = new Translog.Index( - "_doc", - randomAlphaOfLength(10), - seqNo, - primaryTerm.get(), - new byte[] { randomByte() } - ); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() }); boolean shouldAdd = !withMissingOps || seqNo % 4 != 0; if (shouldAdd) { translog.add(op); @@ -928,14 +916,13 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable { Translog.Index expIndexOp = (Translog.Index) expectedOp; assertEquals(expIndexOp.id(), indexOp.id()); assertEquals(expIndexOp.routing(), indexOp.routing()); - assertEquals(expIndexOp.type(), indexOp.type()); assertEquals(expIndexOp.source(), indexOp.source()); assertEquals(expIndexOp.version(), indexOp.version()); break; case DELETE: Translog.Delete delOp = (Translog.Delete) op; Translog.Delete expDelOp = (Translog.Delete) expectedOp; - assertEquals(expDelOp.uid(), delOp.uid()); + assertEquals(expDelOp.id(), delOp.id()); assertEquals(expDelOp.version(), delOp.version()); break; case NO_OP: @@ -962,7 +949,7 @@ public void testTranslogCorruption() throws Exception { int translogOperations = randomIntBetween(10, 1000); for (int op = 0; op < translogOperations; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), 
ascii.getBytes("UTF-8")))); + locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); if (rarely()) { translog.rollGeneration(); @@ -989,7 +976,7 @@ public void testTruncatedTranslogs() throws Exception { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); + locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); } translog.sync(); @@ -1034,7 +1021,7 @@ private Term newUid(String id) { public void testVerifyTranslogIsNotDeleted() throws IOException { assertFileIsPresent(translog, 1); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.size(1)); assertFileIsPresent(translog, 1); @@ -1086,10 +1073,10 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep switch (type) { case CREATE: case INDEX: - op = new Translog.Index("type", "" + id, id, primaryTerm.get(), new byte[] { (byte) id }); + op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[] { (byte) id }); break; case DELETE: - op = new Translog.Delete("test", Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id))); + op = new Translog.Delete(Long.toString(id), id, primaryTerm.get()); break; case NO_OP: op = new Translog.NoOp(id, 1, Long.toString(id)); @@ -1248,7 +1235,7 @@ public void testSyncUpTo() throws IOException { for (int op = 0; op < translogOperations; op++) { int seqNo = ++count; final Translog.Location location = translog.add( - new Translog.Index("test", "" + op, seqNo, primaryTerm.get(), 
Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) ); if (randomBoolean()) { assertTrue("at least one operation pending", translog.syncNeeded()); @@ -1257,13 +1244,7 @@ public void testSyncUpTo() throws IOException { assertFalse("the last call to ensureSycned synced all previous ops", translog.syncNeeded()); seqNo = ++count; translog.add( - new Translog.Index( - "test", - "" + op, - seqNo, - primaryTerm.get(), - Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) ); assertTrue("one pending operation", translog.syncNeeded()); assertFalse("this op has been synced before", translog.ensureSynced(location)); // not syncing now @@ -1293,7 +1274,7 @@ public void testSyncUpToStream() throws IOException { translog.rollGeneration(); } final Translog.Location location = translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) ); locations.add(location); } @@ -1325,7 +1306,7 @@ public void testLocationComparison() throws IOException { for (int op = 0; op < translogOperations; op++) { locations.add( translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) ) ); if (rarely() && translogOperations > op + 1) { @@ -1364,9 +1345,7 @@ public void testBasicCheckpoint() throws IOException { long lastSyncedGlobalCheckpoint = globalCheckpoint.get(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", 
"" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (randomBoolean()) { globalCheckpoint.set(globalCheckpoint.get() + randomIntBetween(1, 16)); @@ -1380,7 +1359,6 @@ public void testBasicCheckpoint() throws IOException { assertEquals(translogOperations, translog.totalOperations()); translog.add( new Translog.Index( - "test", "" + translogOperations, translogOperations, primaryTerm.get(), @@ -1730,9 +1708,7 @@ public void testBasicRecovery() throws IOException { final boolean commitOften = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); final boolean commit = commitOften ? 
frequently() : rarely(); if (commit && op < translogOperations - 1) { @@ -1791,9 +1767,7 @@ public void testRecoveryUncommitted() throws IOException { final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (op == prepareOp) { translogGeneration = translog.getGeneration(); @@ -1878,9 +1852,7 @@ public void testRecoveryUncommittedFileExists() throws IOException { final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (op == prepareOp) { translogGeneration = translog.getGeneration(); @@ -1968,7 +1940,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { Translog.TranslogGeneration translogGeneration = null; final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { - translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8))); + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8))); if (op == prepareOp) { translogGeneration = translog.getGeneration(); translog.rollGeneration(); @@ -2003,7 +1975,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { assertThat( translogCorruptedException.getMessage(), endsWith( - "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=3025, 
" + "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=2750, " + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} " + "but got Checkpoint{offset=0, numOps=0, generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, " + "minTranslogGeneration=0, trimmedAboveSeqNo=-2}" @@ -2050,7 +2022,6 @@ public void testSnapshotFromStreamInput() throws IOException { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { Translog.Index test = new Translog.Index( - "test", "" + op, op, primaryTerm.get(), @@ -2073,7 +2044,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr for (int op = 0; op < extraDocs; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8")); + Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8")); translog.add(operation); } @@ -2093,13 +2064,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr translog.rollGeneration(); // add a single operation to current with seq# > trimmed seq# but higher primary term - Translog.Index operation = new Translog.Index( - "test", - "" + 1, - 1L, - primaryTerm.get(), - randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8") - ); + Translog.Index operation = new Translog.Index("" + 1, 1L, primaryTerm.get(), randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")); translog.add(operation); // it is possible to trim after generation rollover @@ -2129,7 +2094,7 @@ public void testSnapshotTrimmedOperations() throws Exception { } // use ongoing primaryTerms - or the same as it was - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8")); + Translog.Index operation 
= new Translog.Index("" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8")); translog.add(operation); inMemoryTranslog.add(operation); allOperations.add(operation); @@ -2213,7 +2178,7 @@ public void testRandomExceptionsOnTrimOperations() throws Exception { Randomness.shuffle(ops); for (int op : ops) { String ascii = randomAlphaOfLengthBetween(1, 50); - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")); + Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")); failableTLog.add(operation); } @@ -2271,12 +2236,12 @@ public void testLocationHashCodeEquals() throws IOException { for (int op = 0; op < translogOperations; op++) { locations.add( translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ) ); locations2.add( translog2.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ) ); } @@ -2305,9 +2270,7 @@ public void testOpenForeignTranslog() throws IOException { int firstUncommitted = 0; for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (randomBoolean()) { translog.rollGeneration(); @@ -2355,10 +2318,10 @@ public void testOpenForeignTranslog() throws IOException { } public void testFailOnClosedWrite() throws IOException { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), 
Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); translog.close(); try { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); fail("closed"); } catch (AlreadyClosedException ex) { // all is well @@ -2442,7 +2405,6 @@ public void run() { case CREATE: case INDEX: op = new Translog.Index( - "test", threadId + "_" + opCount, seqNoGenerator.getAndIncrement(), primaryTerm.get(), @@ -2451,9 +2413,7 @@ public void run() { break; case DELETE: op = new Translog.Delete( - "test", threadId + "_" + opCount, - new Term("_uid", threadId + "_" + opCount), seqNoGenerator.getAndIncrement(), primaryTerm.get(), 1 + randomInt(100000) @@ -2499,7 +2459,6 @@ public void testFailFlush() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsSynced, opsSynced, primaryTerm.get(), @@ -2529,7 +2488,6 @@ public void testFailFlush() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsSynced, opsSynced, primaryTerm.get(), @@ -2611,7 +2569,6 @@ public void testTranslogOpsCountIsCorrect() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsAdded, opsAdded, primaryTerm.get(), @@ -2640,13 +2597,11 @@ public void testTragicEventCanBeAnyException() throws IOException { TranslogConfig config = getTranslogConfig(tempDir); Translog translog = getFailableTranslog(fail, config, false, true, null, createTranslogDeletionPolicy()); LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boarders regularly - translog.add( - new Translog.Index("test", "1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) - 
); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); fail.failAlways(); try { Translog.Location location = translog.add( - new Translog.Index("test", "2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) + new Translog.Index("2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) ); if (randomBoolean()) { translog.ensureSynced(location); @@ -2772,13 +2727,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException { int op = 0; for (; op < translogOperations / 2; op++) { translog.add( - new Translog.Index( - "_doc", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -2788,13 +2737,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException { long localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op); for (op = translogOperations / 2; op < translogOperations; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -2847,13 +2790,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException { int op = 0; for (; op < translogOperations / 2; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); 
if (rarely()) { translog.rollGeneration(); @@ -2863,13 +2800,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException { localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op); for (op = translogOperations / 2; op < translogOperations; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -3132,7 +3063,7 @@ public void testFailWhileCreateWriteWithRecoveredTLogs() throws IOException { Path tempDir = createTempDir(); TranslogConfig config = getTranslogConfig(tempDir); Translog translog = createTranslog(config); - translog.add(new Translog.Index("test", "boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8")))); translog.close(); try { new Translog( @@ -3161,7 +3092,7 @@ protected TranslogWriter createWriter( } public void testRecoverWithUnbackedNextGen() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); @@ -3176,7 +3107,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException { assertNotNull("operation 1 must be non-null", op); assertEquals("payload mismatch for operation 1", 1, Integer.parseInt(op.getSource().source.utf8ToString())); - tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8")))); + tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), 
Integer.toString(2).getBytes(Charset.forName("UTF-8")))); } try (Translog tlog = openTranslog(config, translog.getTranslogUUID()); Translog.Snapshot snapshot = tlog.newSnapshot()) { @@ -3193,7 +3124,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException { } public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); @@ -3217,7 +3148,7 @@ public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException { } public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); final String translogUUID = translog.getTranslogUUID(); @@ -3247,7 +3178,7 @@ public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } } - tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } TranslogException ex = expectThrows( @@ -3293,7 +3224,7 @@ public void testWithRandomException() throws IOException { for (int opsAdded = 0; opsAdded < numOps; opsAdded++) { String doc = lineFileDocs.nextDoc().toString(); failableTLog.add( - new 
Translog.Index("test", "" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8"))) ); unsynced.add(doc); if (randomBoolean()) { @@ -3464,7 +3395,7 @@ public void testLegacyCheckpointVersion() throws IOException { * Tests that closing views after the translog is fine and we can reopen the translog */ public void testPendingDelete() throws IOException { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); translog.rollGeneration(); TranslogConfig config = translog.getConfig(); final String translogUUID = translog.getTranslogUUID(); @@ -3478,10 +3409,10 @@ public void testPendingDelete() throws IOException { primaryTerm::get, seqNo -> {} ); - translog.add(new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 })); + translog.add(new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 })); translog.rollGeneration(); Closeable lock = translog.acquireRetentionLock(); - translog.add(new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 })); + translog.add(new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 })); translog.close(); IOUtils.close(lock); translog = new Translog( @@ -3515,17 +3446,7 @@ public void testTranslogOpSerialization() throws Exception { document.add(seqID.seqNo); document.add(seqID.seqNoDocValue); document.add(seqID.primaryTerm); - ParsedDocument doc = new ParsedDocument( - versionField, - seqID, - "1", - "type", - null, - Arrays.asList(document), - B_1, - XContentType.JSON, - null - ); + ParsedDocument doc = new ParsedDocument(versionField, seqID, "1", null, Arrays.asList(document), B_1, XContentType.JSON, null); Engine.Index eIndex = new Engine.Index( newUid(doc), @@ -3554,7 +3475,6 @@ public void testTranslogOpSerialization() throws Exception { assertEquals(index, 
serializedIndex); Engine.Delete eDelete = new Engine.Delete( - doc.type(), doc.id(), newUid(doc), randomSeqNum, @@ -3793,7 +3713,6 @@ public void testSnapshotReadOperationInReverse() throws Exception { final int operations = randomIntBetween(1, 100); for (int i = 0; i < operations; i++) { Translog.Index op = new Translog.Index( - "doc", randomAlphaOfLength(10), seqNo.getAndIncrement(), primaryTerm.get(), @@ -3823,7 +3742,7 @@ public void testSnapshotDedupOperations() throws Exception { List batch = LongStream.rangeClosed(0, between(0, 500)).boxed().collect(Collectors.toList()); Randomness.shuffle(batch); for (Long seqNo : batch) { - Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 }); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 }); translog.add(op); latestOperations.put(op.seqNo(), op); } @@ -3838,7 +3757,7 @@ public void testSnapshotDedupOperations() throws Exception { public void testCloseSnapshotTwice() throws Exception { int numOps = between(0, 10); for (int i = 0; i < numOps; i++) { - Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 }); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 }); translog.add(op); if (randomBoolean()) { translog.rollGeneration(); @@ -3912,7 +3831,7 @@ public void testMaxSeqNo() throws Exception { Randomness.shuffle(seqNos); for (long seqNo : seqNos) { if (frequently()) { - translog.add(new Translog.Index("test", "id", seqNo, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("id", seqNo, primaryTerm.get(), new byte[] { 1 })); maxSeqNoPerGeneration.compute( translog.currentFileGeneration(), (key, existing) -> existing == null ? 
seqNo : Math.max(existing, seqNo) @@ -4050,9 +3969,7 @@ public void testSyncConcurrently() throws Exception { int iterations = randomIntBetween(10, 100); for (int i = 0; i < iterations; i++) { List ops = IntStream.range(0, between(1, 10)) - .mapToObj( - n -> new Translog.Index("test", "1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 }) - ) + .mapToObj(n -> new Translog.Index("1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 })) .collect(Collectors.toList()); try { Translog.Location location = null; @@ -4134,7 +4051,7 @@ void syncBeforeRollGeneration() { } }; try { - translog.add(new Translog.Index("1", "_doc", 1, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 1, primaryTerm.get(), new byte[] { 1 })); failedToSyncCheckpoint.set(true); expectThrows(IOException.class, translog::rollGeneration); final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, translog::rollGeneration); diff --git a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java index 2c32e419b27db..c68ad7eaba82e 100644 --- a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java +++ b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java @@ -367,7 +367,7 @@ public void testThrottling() throws Exception { public void testTranslogRecoveryWorksWithIMC() throws IOException { IndexShard shard = newStartedShard(true); for (int i = 0; i < 100; i++) { - indexDoc(shard, "_doc", Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null); + indexDoc(shard, Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null); } shard.close("simon says", false); AtomicReference shardRef = new AtomicReference<>(); diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java 
b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java index e7b45f053e919..ff97b87708202 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java @@ -179,7 +179,7 @@ public void testCloseWhileOngoingRequest() throws Exception { .prepareCreate("test") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.emptyMap()).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.emptyMap()).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -213,7 +213,7 @@ public void testCloseAfterRequestHasUsedQueryCache() throws Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -256,7 +256,7 @@ public void testCloseWhileOngoingRequestUsesQueryCache() throws Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); @@ -298,7 +298,7 @@ public void testCloseWhileOngoingRequestUsesRequestCache() throws 
Exception { .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) ) ); - node.client().prepareIndex("test", "_doc", "1").setSource(Collections.singletonMap("foo", 3L)).get(); + node.client().prepareIndex("test").setId("1").setSource(Collections.singletonMap("foo", 3L)).get(); OpenSearchAssertions.assertAllSuccessful(node.client().admin().indices().prepareRefresh("test").get()); assertEquals(2, indicesService.indicesRefCount.refCount()); diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java b/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java index bcd0fa5d54c56..8dd156dfcd0d2 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceTests.java @@ -272,7 +272,7 @@ public void testDeleteIndexStore() throws Exception { assertNull(meta.index("test")); test = createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); client().admin().indices().prepareFlush("test").get(); assertHitCount(client().prepareSearch("test").get(), 1); IndexMetadata secondMetadata = clusterService.state().metadata().index("test"); diff --git a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java index fb1462b500ea9..661995a22c507 100644 --- a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java @@ -45,42 +45,36 @@ public class TermsLookupTests extends OpenSearchTestCase { public void testTermsLookup() { String index = randomAlphaOfLengthBetween(1, 10); - String type = randomAlphaOfLengthBetween(1, 10); String id = randomAlphaOfLengthBetween(1, 10); String path = randomAlphaOfLengthBetween(1, 10); String routing = 
randomAlphaOfLengthBetween(1, 10); - TermsLookup termsLookup = new TermsLookup(index, type, id, path); + TermsLookup termsLookup = new TermsLookup(index, id, path); termsLookup.routing(routing); assertEquals(index, termsLookup.index()); - assertEquals(type, termsLookup.type()); assertEquals(id, termsLookup.id()); assertEquals(path, termsLookup.path()); assertEquals(routing, termsLookup.routing()); } public void testIllegalArguments() { - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); String path = randomAlphaOfLength(5); String index = randomAlphaOfLength(5); - switch (randomIntBetween(0, 3)) { + switch (randomIntBetween(0, 2)) { case 0: - type = null; - break; - case 1: id = null; break; - case 2: + case 1: path = null; break; - case 3: + case 2: index = null; break; default: fail("unknown case"); } try { - new TermsLookup(index, type, id, path); + new TermsLookup(index, id, path); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("[terms] query lookup element requires specifying")); } @@ -99,35 +93,6 @@ public void testSerialization() throws IOException { } } - public void testSerializationWithTypes() throws IOException { - TermsLookup termsLookup = randomTermsLookupWithTypes(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - termsLookup.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - TermsLookup deserializedLookup = new TermsLookup(in); - assertEquals(deserializedLookup, termsLookup); - assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); - assertNotSame(deserializedLookup, termsLookup); - } - } - } - - public void testXContentParsingWithType() throws IOException { - XContentParser parser = createParser( - JsonXContent.jsonXContent, - "{ \"index\" : \"index\", \"id\" : \"id\", \"type\" : \"type\", \"path\" : \"path\", \"routing\" : \"routing\" }" - ); - - TermsLookup tl = TermsLookup.parseTermsLookup(parser); - assertEquals("index", tl.index()); - 
assertEquals("type", tl.type()); - assertEquals("id", tl.id()); - assertEquals("path", tl.path()); - assertEquals("routing", tl.routing()); - - assertWarnings("Deprecated field [type] used, this field is unused and will be removed entirely"); - } - public void testXContentParsing() throws IOException { XContentParser parser = createParser( JsonXContent.jsonXContent, @@ -136,7 +101,6 @@ public void testXContentParsing() throws IOException { TermsLookup tl = TermsLookup.parseTermsLookup(parser); assertEquals("index", tl.index()); - assertNull(tl.type()); assertEquals("id", tl.id()); assertEquals("path", tl.path()); assertEquals("routing", tl.routing()); @@ -147,13 +111,4 @@ public static TermsLookup randomTermsLookup() { randomBoolean() ? randomAlphaOfLength(10) : null ); } - - public static TermsLookup randomTermsLookupWithTypes() { - return new TermsLookup( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10).replace('.', '_') - ).routing(randomBoolean() ? 
randomAlphaOfLength(10) : null); - } } diff --git a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 501253bee27f9..e54f06937cad3 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -185,7 +185,7 @@ private SeqNoStats populateRandomData(IndexShard shard) throws IOException { shard.getOperationPrimaryTerm(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON) ); if (randomInt(100) < 5) { shard.flush(new FlushRequest().waitIfOngoing(true)); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index 720356cb49588..dbafab49d8655 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -46,6 +46,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.SetOnce; +import org.junit.After; +import org.junit.Before; import org.opensearch.ExceptionsHelper; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -59,6 +61,7 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.FileSystemUtils; import org.opensearch.common.lease.Releasable; 
import org.opensearch.common.lucene.store.IndexOutputOutputStream; @@ -93,14 +96,12 @@ import org.opensearch.index.translog.Translog; import org.opensearch.test.CorruptionUtils; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.FixedExecutorBuilder; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.io.OutputStream; @@ -461,7 +462,6 @@ public void indexTranslogOperations( } private Engine.Index getIndex(final String id) { - final String type = "test"; final ParseContext.Document document = new ParseContext.Document(); document.add(new TextField("test", "test", Field.Store.YES)); final Field idField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE); @@ -477,7 +477,6 @@ private Engine.Index getIndex(final String id) { versionField, seqID, id, - type, null, Arrays.asList(document), source, @@ -650,7 +649,7 @@ public void testThrowExceptionOnPrimaryRelocatedBeforePhase1Started() throws IOE when(shard.seqNoStats()).thenReturn(mock(SeqNoStats.class)); when(shard.segmentStats(anyBoolean(), anyBoolean())).thenReturn(mock(SegmentsStats.class)); when(shard.isRelocatedPrimary()).thenReturn(true); - when(shard.acquireSafeIndexCommit()).thenReturn(mock(Engine.IndexCommitRef.class)); + when(shard.acquireSafeIndexCommit()).thenReturn(mock(GatedCloseable.class)); doAnswer(invocation -> { ((ActionListener) invocation.getArguments()[0]).onResponse(() -> {}); return null; @@ -1187,16 +1186,9 @@ private static List generateOperations(int numOps) { final long seqNo = randomValueOtherThanMany(n -> seqNos.add(n) == false, OpenSearchTestCase::randomNonNegativeLong); final Translog.Operation op; if (randomBoolean()) { - op = new 
Translog.Index("_doc", "id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1); + op = new Translog.Index("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1); } else if (randomBoolean()) { - op = new Translog.Delete( - "_doc", - "id", - new Term("_id", Uid.encodeId("id")), - seqNo, - randomNonNegativeLong(), - randomNonNegativeLong() - ); + op = new Translog.Delete("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong()); } else { op = new Translog.NoOp(seqNo, randomNonNegativeLong(), "test"); } diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java index d105da6565897..5e09e0f2253df 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java @@ -41,7 +41,6 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.store.AlreadyClosedException; import org.opensearch.ExceptionsHelper; -import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.action.admin.indices.flush.FlushRequest; import org.opensearch.action.bulk.BulkShardRequest; @@ -69,7 +68,6 @@ import org.opensearch.index.store.Store; import org.opensearch.index.translog.SnapshotMatchers; import org.opensearch.index.translog.Translog; -import org.opensearch.test.VersionUtils; import java.io.IOException; import java.util.HashMap; @@ -146,108 +144,6 @@ public void testRetentionPolicyChangeDuringRecovery() throws Exception { } } - public void testRecoveryWithOutOfOrderDeleteWithTranslog() throws Exception { - /* - * The flow of this test: - * - delete #1 - * - roll generation (to create gen 2) - * - index #0 - * - index #3 - * - flush (commit point has max_seqno 3, and local checkpoint 1 -> points at gen 2, previous commit point is maintained) - * - index #2 - * - index #5 - * - If flush and the 
translog retention disabled, delete #1 will be removed while index #0 is still retained and replayed. - */ - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_2_0_0)) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) - .build(); - try (ReplicationGroup shards = createGroup(1, settings)) { - shards.startAll(); - // create out of order delete and index op on replica - final IndexShard orgReplica = shards.getReplicas().get(0); - final String indexName = orgReplica.shardId().getIndexName(); - final long primaryTerm = orgReplica.getOperationPrimaryTerm(); - - // delete #1 - orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete - orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "type", "id"); - getTranslog(orgReplica).rollGeneration(); // isolate the delete in it's own generation - // index #0 - orgReplica.applyIndexOperationOnReplica( - 0, - primaryTerm, - 1, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, - false, - new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON) - ); - // index #3 - orgReplica.applyIndexOperationOnReplica( - 3, - primaryTerm, - 1, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, - false, - new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON) - ); - // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. - orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); - // index #2 - orgReplica.applyIndexOperationOnReplica( - 2, - primaryTerm, - 1, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, - false, - new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON) - ); - orgReplica.sync(); // advance local checkpoint - orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); - // index #5 -> force NoOp #4. 
- orgReplica.applyIndexOperationOnReplica( - 5, - primaryTerm, - 1, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, - false, - new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON) - ); - - final int translogOps; - if (randomBoolean()) { - if (randomBoolean()) { - logger.info("--> flushing shard (translog will be trimmed)"); - IndexMetadata.Builder builder = IndexMetadata.builder(orgReplica.indexSettings().getIndexMetadata()); - builder.settings( - Settings.builder() - .put(orgReplica.indexSettings().getSettings()) - .put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), "-1") - .put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1") - ); - orgReplica.indexSettings().updateIndexMetadata(builder.build()); - orgReplica.onSettingsChanged(); - translogOps = 5; // 4 ops + seqno gaps (delete #1 is removed but index #0 will be replayed). - } else { - logger.info("--> flushing shard (translog will be retained)"); - translogOps = 6; // 5 ops + seqno gaps - } - flushShard(orgReplica); - } else { - translogOps = 6; // 5 ops + seqno gaps - } - - final IndexShard orgPrimary = shards.getPrimary(); - shards.promoteReplicaToPrimary(orgReplica).get(); // wait for primary/replica sync to make sure seq# gap is closed. 
- - IndexShard newReplica = shards.addReplicaWithExistingPath(orgPrimary.shardPath(), orgPrimary.routingEntry().currentNodeId()); - shards.recoverReplica(newReplica); - shards.assertAllEqual(3); - - assertThat(getTranslog(newReplica).totalOperations(), equalTo(translogOps)); - } - } - public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { Settings settings = Settings.builder() .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) @@ -265,7 +161,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { // delete #1 orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete - orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "type", "id"); + orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id"); orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 orgReplica.applyIndexOperationOnReplica( @@ -274,7 +170,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id", new BytesArray("{}"), XContentType.JSON) ); // index #3 orgReplica.applyIndexOperationOnReplica( @@ -283,7 +179,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-3", new BytesArray("{}"), XContentType.JSON) ); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. 
orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); @@ -294,7 +190,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-2", new BytesArray("{}"), XContentType.JSON) ); orgReplica.sync(); // advance local checkpoint orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); @@ -305,7 +201,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-5", new BytesArray("{}"), XContentType.JSON) ); if (randomBoolean()) { @@ -329,7 +225,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { IndexShard newReplica = shards.addReplicaWithExistingPath(orgPrimary.shardPath(), orgPrimary.routingEntry().currentNodeId()); shards.recoverReplica(newReplica); shards.assertAllEqual(3); - try (Translog.Snapshot snapshot = newReplica.getHistoryOperations("test", Engine.HistorySource.INDEX, 0)) { + try (Translog.Snapshot snapshot = newReplica.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(6)); } } @@ -414,13 +310,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse( - primaryShard.shardId().getIndexName(), - "_doc", - Integer.toString(i), - new BytesArray("{}"), - XContentType.JSON - ), + new SourceToParse(primaryShard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -590,7 +480,7 @@ 
public void testRecoveryTrimsLocalTranslog() throws Exception { } int inflightDocs = scaledRandomIntBetween(1, 100); for (int i = 0; i < inflightDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i).source("{}", XContentType.JSON); + final IndexRequest indexRequest = new IndexRequest(index.getName()).id("extra_" + i).source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); for (IndexShard replica : randomSubsetOf(shards.getReplicas())) { indexOnReplica(bulkShardRequest, shards, replica); diff --git a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java index 2c4bc5061d822..a6ea02a5423c4 100644 --- a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java @@ -92,7 +92,7 @@ public void setTestIngestDocument() { list2.add("bar"); list2.add("baz"); document.put("list2", list2); - ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document); + ingestDocument = new IngestDocument("index", "id", null, null, null, document); } public void testSimpleGetFieldValue() { @@ -101,7 +101,6 @@ public void testSimpleGetFieldValue() { assertThat(ingestDocument.getFieldValue("_source.foo", String.class), equalTo("bar")); assertThat(ingestDocument.getFieldValue("_source.int", Integer.class), equalTo(123)); assertThat(ingestDocument.getFieldValue("_index", String.class), equalTo("index")); - assertThat(ingestDocument.getFieldValue("_type", String.class), equalTo("type")); assertThat(ingestDocument.getFieldValue("_id", String.class), equalTo("id")); assertThat( ingestDocument.getFieldValue("_ingest.timestamp", ZonedDateTime.class), @@ -238,7 +237,6 @@ public void testGetFieldValueEmpty() { public void testHasField() { assertTrue(ingestDocument.hasField("fizz")); 
assertTrue(ingestDocument.hasField("_index")); - assertTrue(ingestDocument.hasField("_type")); assertTrue(ingestDocument.hasField("_id")); assertTrue(ingestDocument.hasField("_source.fizz")); assertTrue(ingestDocument.hasField("_ingest.timestamp")); @@ -808,23 +806,23 @@ public void testSetFieldValueEmptyName() { public void testRemoveField() { ingestDocument.removeField("foo"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("foo"), equalTo(false)); ingestDocument.removeField("_index"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(false)); ingestDocument.removeField("_source.fizz"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(false)); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(1)); ingestDocument.removeField("_ingest.timestamp"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5)); assertThat(ingestDocument.getIngestMetadata().size(), equalTo(0)); } public void testRemoveInnerField() { ingestDocument.removeField("fizz.buzz"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class)); @SuppressWarnings("unchecked") Map map = (Map) ingestDocument.getSourceAndMetadata().get("fizz"); @@ -833,17 +831,17 @@ public void testRemoveInnerField() { ingestDocument.removeField("fizz.foo_null"); 
assertThat(map.size(), equalTo(2)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.1"); assertThat(map.size(), equalTo(1)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); ingestDocument.removeField("fizz.list"); assertThat(map.size(), equalTo(0)); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true)); } @@ -879,7 +877,7 @@ public void testRemoveSourceObject() { public void testRemoveIngestObject() { ingestDocument.removeField("_ingest"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("_ingest"), equalTo(false)); } @@ -901,7 +899,7 @@ public void testRemoveEmptyPathAfterStrippingOutPrefix() { public void testListRemoveField() { ingestDocument.removeField("list.0.field"); - assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(9)); + assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); Object object = ingestDocument.getSourceAndMetadata().get("list"); assertThat(object, instanceOf(List.class)); diff --git a/server/src/test/java/org/opensearch/ingest/IngestServiceTests.java b/server/src/test/java/org/opensearch/ingest/IngestServiceTests.java index 544fa7bc09d8f..fcd15e85979f7 100644 --- 
a/server/src/test/java/org/opensearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/opensearch/ingest/IngestServiceTests.java @@ -181,7 +181,8 @@ public void testExecuteIndexPipelineDoesNotExist() { Collections.singletonList(DUMMY_PLUGIN), client ); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); @@ -729,13 +730,12 @@ public String getType() { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final SetOnce failure = new SetOnce<>(); BulkRequest bulkRequest = new BulkRequest(); - final IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id1").source(emptyMap()) + final IndexRequest indexRequest1 = new IndexRequest("_index").id("_id1") + .source(emptyMap()) .setPipeline("_none") .setFinalPipeline("_none"); bulkRequest.add(indexRequest1); - IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id2").source(emptyMap()) - .setPipeline(id) - .setFinalPipeline("_none"); + IndexRequest indexRequest2 = new IndexRequest("_index").id("_id2").source(emptyMap()).setPipeline(id).setFinalPipeline("_none"); bulkRequest.add(indexRequest2); final BiConsumer failureHandler = (slot, e) -> { @@ -778,15 +778,15 @@ public void testExecuteBulkPipelineDoesNotExist() { BulkRequest bulkRequest = new BulkRequest(); - IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id1").source(emptyMap()) + IndexRequest indexRequest1 = new IndexRequest("_index").id("_id1") + .source(emptyMap()) .setPipeline("_none") .setFinalPipeline("_none"); bulkRequest.add(indexRequest1); - IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id2").source(emptyMap()) - .setPipeline("_id") - .setFinalPipeline("_none"); + IndexRequest indexRequest2 = new 
IndexRequest("_index").id("_id2").source(emptyMap()).setPipeline("_id").setFinalPipeline("_none"); bulkRequest.add(indexRequest2); - IndexRequest indexRequest3 = new IndexRequest("_index", "_type", "_id3").source(emptyMap()) + IndexRequest indexRequest3 = new IndexRequest("_index").id("_id3") + .source(emptyMap()) .setPipeline("does_not_exist") .setFinalPipeline("_none"); bulkRequest.add(indexRequest3); @@ -822,7 +822,8 @@ public void testExecuteSuccess() { ClusterState previousClusterState = clusterState; clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") @@ -852,7 +853,8 @@ public void testExecuteEmptyPipeline() throws Exception { ClusterState previousClusterState = clusterState; clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") @@ -910,7 +912,8 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { handler.accept(ingestDocument, null); return null; }).when(processor).execute(any(), any()); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") @@ -929,7 +932,6 @@ public void 
testExecutePropagateAllMetadataUpdates() throws Exception { verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); assertThat(indexRequest.index(), equalTo("update_index")); - assertThat(indexRequest.type(), equalTo("update_type")); assertThat(indexRequest.id(), equalTo("update_id")); assertThat(indexRequest.routing(), equalTo("update_routing")); assertThat(indexRequest.version(), equalTo(newVersion)); @@ -952,7 +954,8 @@ public void testExecuteFailure() throws Exception { ClusterState previousClusterState = clusterState; clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); doThrow(new RuntimeException()).when(processor) @@ -1011,7 +1014,8 @@ public void testExecuteSuccessWithOnFailure() throws Exception { ClusterState previousClusterState = clusterState; clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); @SuppressWarnings("unchecked") @@ -1053,7 +1057,8 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { ClusterState previousClusterState = clusterState; clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - final IndexRequest indexRequest = new IndexRequest("_index", 
"_type", "_id").source(emptyMap()) + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); doThrow(new RuntimeException()).when(onFailureOnFailureProcessor) @@ -1089,12 +1094,12 @@ public void testBulkRequestExecutionWithFailures() throws Exception { DocWriteRequest request; if (randomBoolean()) { if (randomBoolean()) { - request = new DeleteRequest("_index", "_type", "_id"); + request = new DeleteRequest("_index", "_id"); } else { - request = new UpdateRequest("_index", "_type", "_id"); + request = new UpdateRequest("_index", "_id"); } } else { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId).setFinalPipeline("_none"); + IndexRequest indexRequest = new IndexRequest("_index").id("_id").setPipeline(pipelineId).setFinalPipeline("_none"); indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); request = indexRequest; numIndexRequests++; @@ -1154,7 +1159,7 @@ public void testBulkRequestExecution() throws Exception { logger.info("Using [{}], not randomly determined default [{}]", xContentType, Requests.INDEX_CONTENT_TYPE); int numRequest = scaledRandomIntBetween(8, 64); for (int i = 0; i < numRequest; i++) { - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId).setFinalPipeline("_none"); + IndexRequest indexRequest = new IndexRequest("_index").id("_id").setPipeline(pipelineId).setFinalPipeline("_none"); indexRequest.source(xContentType, "field1", "value1"); bulkRequest.add(indexRequest); } @@ -1420,12 +1425,14 @@ public String getDescription() { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); BulkRequest bulkRequest = new BulkRequest(); - final IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id1").source(Collections.emptyMap()) + final IndexRequest indexRequest1 = new IndexRequest("_index").id("_id1") + 
.source(Collections.emptyMap()) .setPipeline("_none") .setFinalPipeline("_none"); bulkRequest.add(indexRequest1); - IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id2").source(Collections.emptyMap()) + IndexRequest indexRequest2 = new IndexRequest("_index").id("_id2") + .source(Collections.emptyMap()) .setPipeline("_id") .setFinalPipeline("_none"); bulkRequest.add(indexRequest2); @@ -1711,11 +1718,11 @@ private class IngestDocumentMatcher implements ArgumentMatcher { private final IngestDocument ingestDocument; IngestDocumentMatcher(String index, String type, String id, Map source) { - this.ingestDocument = new IngestDocument(index, type, id, null, null, null, source); + this.ingestDocument = new IngestDocument(index, id, null, null, null, source); } IngestDocumentMatcher(String index, String type, String id, Long version, VersionType versionType, Map source) { - this.ingestDocument = new IngestDocument(index, type, id, null, version, versionType, source); + this.ingestDocument = new IngestDocument(index, id, null, version, versionType, source); } @Override diff --git a/server/src/test/java/org/opensearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/opensearch/monitor/os/OsProbeTests.java index 140143ad4e00f..505dce8879bdd 100644 --- a/server/src/test/java/org/opensearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/opensearch/monitor/os/OsProbeTests.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assume.assumeThat; import java.io.IOException; import java.math.BigInteger; @@ -50,9 +51,17 @@ import java.util.Locale; import java.util.stream.Collectors; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; import org.opensearch.test.OpenSearchTestCase; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import 
static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + public class OsProbeTests extends OpenSearchTestCase { public void testOsInfo() throws IOException { @@ -277,6 +286,25 @@ public void testCgroupProbeWithMissingMemory() { assertNull(cgroup); } + public void testLogWarnCpuMessageOnlyOnes() { + final Logger logger = mock(Logger.class); + + final OsProbe noCpuStatsOsProbe = new OsProbe(logger) { + @Override + List readSysFsCgroupCpuAcctCpuStat(String controlGroup) throws IOException { + return Collections.singletonList("nr_periods 1"); + } + }; + + assumeThat("CGroups are not available", noCpuStatsOsProbe.areCgroupStatsAvailable(), is(true)); + noCpuStatsOsProbe.osStats(); + // no nr_throttled and throttled_time + verify(logger, times(2)).warn(anyString()); + reset(logger); + noCpuStatsOsProbe.osStats(); + verify(logger, never()).warn(anyString()); + } + private static List getProcSelfGroupLines(String hierarchy) { return Arrays.asList( "10:freezer:/", @@ -361,4 +389,5 @@ boolean areCgroupStatsAvailable() { } }; } + } diff --git a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java index 69923e4390ead..6a08f5115d1e2 100644 --- a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java +++ b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java @@ -69,10 +69,10 @@ public void testLastAccessTimeUpdate() throws Exception { final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool); final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) { - final long lastSeenTime = status.target().lastAccessTime(); + final long lastSeenTime = status.get().lastAccessTime(); assertBusy(() -> 
{ try (RecoveriesCollection.RecoveryRef currentStatus = collection.getRecovery(recoveryId)) { - assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.target().lastAccessTime())); + assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.get().lastAccessTime())); } }); } finally { @@ -120,7 +120,7 @@ public void testRecoveryCancellation() throws Exception { final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); final long recoveryId2 = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) { - ShardId shardId = recoveryRef.target().shardId(); + ShardId shardId = recoveryRef.get().shardId(); assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test")); assertThat("all recoveries should be cancelled", collection.size(), equalTo(0)); } finally { @@ -160,8 +160,8 @@ public void testResetRecovery() throws Exception { assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget()); try (RecoveriesCollection.RecoveryRef newRecoveryRef = collection.getRecovery(resetRecoveryId)) { shards.recoverReplica(shard, (s, n) -> { - assertSame(s, newRecoveryRef.target().indexShard()); - return newRecoveryRef.target(); + assertSame(s, newRecoveryRef.get().indexShard()); + return newRecoveryRef.get(); }, false); } shards.assertAllEqual(numDocs); diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index e2a5a26850f73..94813d1f7cd33 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -126,7 +126,7 @@ public void testRetrieveSnapshots() throws Exception { int numDocs = 
randomIntBetween(10, 20); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - client().prepareIndex(indexName, "type1", id).setSource("text", "sometext").get(); + client().prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); } client().admin().indices().prepareFlush(indexName).get(); diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java index 2400a59df6021..707210abad948 100644 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java @@ -32,48 +32,16 @@ package org.opensearch.rest.action.admin.indices; -import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.HashMap; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestCreateIndexActionTests extends RestActionTestCase { - private RestCreateIndexAction action; - - @Before - public void setupAction() { - action = new RestCreateIndexAction(); - controller().registerHandler(action); - } - - public void testIncludeTypeName() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest deprecatedRequest = new 
FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .withParams(params) - .build(); - - action.prepareRequest(deprecatedRequest, mock(NodeClient.class)); - assertWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .build(); - action.prepareRequest(validRequest, mock(NodeClient.class)); - } +public class RestCreateIndexActionTests extends OpenSearchTestCase { public void testPrepareTypelessRequest() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() @@ -95,8 +63,7 @@ public void testPrepareTypelessRequest() throws IOException { .endObject(); Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); XContentBuilder expectedContent = XContentFactory.jsonBuilder() .startObject() @@ -126,34 +93,6 @@ public void testPrepareTypelessRequest() throws IOException { assertEquals(expectedContentAsMap, source); } - public void testPrepareTypedRequest() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("aliases") - .startObject("read_alias") - .endObject() - .endObject() - .endObject(); - - Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = true; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, 
includeTypeName); - - assertEquals(contentAsMap, source); - } - public void testMalformedMappings() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() @@ -166,8 +105,7 @@ public void testMalformedMappings() throws IOException { Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); assertEquals(contentAsMap, source); } } diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java deleted file mode 100644 index 1ab35e420a77e..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestStatus; -import org.opensearch.test.rest.FakeRestChannel; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - -public class RestGetFieldMappingActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestGetFieldMappingAction()); - } - - public void testIncludeTypeName() { - Map params = new HashMap<>(); - String path; - if (randomBoolean()) { - params.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - path = "some_index/some_type/_mapping/field/some_field"; - } else { - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - path = "some_index/_mapping/field/some_field"; - } - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath(path) - .withParams(params) - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("some_index/_mapping/field/some_field") - .build(); - dispatchRequest(validRequest); - } - - public void testTypeInPath() { - // Test that specifying a type while setting include_type_name to false - // results in an illegal argument exception. - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("some_index/some_type/_mapping/field/some_field") - .withParams(params) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertEquals(1, channel.errors().get()); - assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java deleted file mode 100644 index 374b2cb0e8636..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestGetIndicesActionTests extends RestActionTestCase { - - /** - * Test that setting the "include_type_name" parameter raises a warning for the GET request - */ - public void testIncludeTypeNamesWarning() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - assertWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE); - - // the same request without the parameter should pass without warning - request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET).withPath("/some_index").build(); - handler.prepareRequest(request, mock(NodeClient.class)); - } - - /** - * Test that setting the "include_type_name" parameter doesn't raises a warning if the HEAD method is used (indices.exists) - */ - public void testIncludeTypeNamesWarningExists() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - 
handler.prepareRequest(request, mock(NodeClient.class)); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetMappingActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetMappingActionTests.java deleted file mode 100644 index 6a7ea5018170c..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetMappingActionTests.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestStatus; -import org.opensearch.test.rest.FakeRestChannel; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.opensearch.threadpool.TestThreadPool; -import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestGetMappingActionTests extends RestActionTestCase { - - private ThreadPool threadPool; - - @Before - public void setUpAction() { - threadPool = new TestThreadPool(RestValidateQueryActionTests.class.getName()); - controller().registerHandler(new RestGetMappingAction(threadPool)); - } - - @After - public void tearDownAction() { - assertTrue(terminate(threadPool)); - } - - public void testTypeExistsDeprecation() throws Exception { - Map params = new HashMap<>(); - params.put("type", "_doc"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD) - .withParams(params) - .build(); - - RestGetMappingAction handler = new RestGetMappingAction(threadPool); - handler.prepareRequest(request, mock(NodeClient.class)); - - assertWarnings("Type exists requests are deprecated, as types have been deprecated."); - } - - public void testTypeInPath() { - // Test that specifying a type while setting include_type_name to false - // results in an illegal argument exception. 
- Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("some_index/some_type/_mapping/some_field") - .withParams(params) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertEquals(1, channel.errors().get()); - assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); - } - - /** - * Setting "include_type_name" to true or false should cause a deprecation warning starting in 7.0 - */ - public void testTypeUrlParameterDeprecation() throws Exception { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, Boolean.toString(randomBoolean())); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withParams(params) - .withPath("/some_index/_mappings") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java deleted file mode 100644 index 864d2f244b8c1..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestPutIndexTemplateActionTests extends RestActionTestCase { - private RestPutIndexTemplateAction action; - - @Before - public void setUpAction() { - action = new RestPutIndexTemplateAction(); - controller().registerHandler(action); - } - - public void testIncludeTypeName() throws IOException { - XContentBuilder typedContent = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("my_doc") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("aliases") - .startObject("read_alias") - .endObject() - .endObject() - .endObject(); - - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withParams(params) - .withPath("/_template/_some_template") - .withContent(BytesReference.bytes(typedContent), XContentType.JSON) - .build(); - action.prepareRequest(request, mock(NodeClient.class)); - assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git 
a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutMappingActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutMappingActionTests.java deleted file mode 100644 index c3255558d2569..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestPutMappingActionTests.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestStatus; -import org.opensearch.test.rest.FakeRestChannel; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - -public class RestPutMappingActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestPutMappingAction()); - } - - public void testIncludeTypeName() { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index/_mapping/") - .withParams(params) - .build(); - - dispatchRequest(deprecatedRequest); - assertWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index/_mapping") - .build(); - dispatchRequest(validRequest); - } - - public void testTypeInPath() { - // Test that specifying a type while include_type_name is false - // results in an illegal argument exception. 
- RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index/_mapping/some_type") - .build(); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertEquals(1, channel.errors().get()); - assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 9ed3cc41e4fb0..cc1a9d4fd2e40 100644 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -41,10 +41,8 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; import org.opensearch.indices.breaker.NoneCircuitBreakerService; -import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; import org.opensearch.search.AbstractSearchTestCase; @@ -174,31 +172,4 @@ private RestRequest createRestRequest(String content) { .withContent(new BytesArray(content), XContentType.JSON) .build(); } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_validate/query") - .build(); - - performRequest(request); - assertWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new 
HashMap<>(); - params.put("type", "some_type"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("_validate/query") - .withParams(params) - .build(); - - performRequest(request); - assertWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - private void performRequest(RestRequest request) { - RestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller.dispatchRequest(request, channel, threadContext); - } } diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestDeleteActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestDeleteActionTests.java deleted file mode 100644 index ae7f5a3a92cdf..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/document/RestDeleteActionTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.document; - -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestDeleteActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestDeleteAction()); - } - - public void testTypeInPath() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.DELETE) - .withPath("/some_index/some_type/some_id") - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.DELETE) - .withPath("/some_index/_doc/some_id") - .build(); - dispatchRequest(validRequest); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestGetActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestGetActionTests.java deleted file mode 100644 index d4d0a81ccecb7..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/document/RestGetActionTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.document; - -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestGetActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestGetAction()); - } - - public void testTypeInPathWithGet() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - FakeRestRequest.Builder deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withPath( - "/some_index/some_type/some_id" - ); - dispatchRequest(deprecatedRequest.withMethod(Method.GET).build()); - assertWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE); - - FakeRestRequest.Builder validRequest = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_doc/some_id"); - dispatchRequest(validRequest.withMethod(Method.GET).build()); - } - - public void testTypeInPathWithHead() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - FakeRestRequest.Builder deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withPath( - "/some_index/some_type/some_id" - ); - dispatchRequest(deprecatedRequest.withMethod(Method.HEAD).build()); - assertWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE); - - FakeRestRequest.Builder validRequest = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_doc/some_id"); - dispatchRequest(validRequest.withMethod(Method.HEAD).build()); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestGetSourceActionTests.java index 5563003fd923d..ca6ecd052fe6a 100644 --- a/server/src/test/java/org/opensearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/document/RestGetSourceActionTests.java @@ -36,10 +36,8 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.index.get.GetResult; import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; import org.opensearch.rest.RestResponse; import org.opensearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener; import org.opensearch.test.rest.FakeRestChannel; @@ -48,10 +46,6 @@ import org.junit.AfterClass; import org.junit.Before; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - import static java.util.Collections.emptyMap; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.opensearch.rest.RestStatus.OK; @@ -75,58 +69,10 @@ public static void cleanupReferences() { listener = null; } - /** - * test deprecation is logged if type is used in path - */ - public void testTypeInPath() { - 
boolean assertWarnings = true; - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - for (Method method : Arrays.asList(Method.GET, Method.HEAD)) { - // Ensure we have a fresh context for each request so we don't get duplicate headers - try (ThreadContext.StoredContext ignore = verifyingClient.threadPool().getThreadContext().stashContext()) { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(method) - .withPath("/some_index/some_type/id/_source") - .build(); - - dispatchRequest(request); - if (assertWarnings) { - assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); - assertWarnings = false; - } - } - } - } - - /** - * test deprecation is logged if type is used as parameter - */ - public void testTypeParameter() { - boolean assertWarnings = true; - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - Map params = new HashMap<>(); - params.put("type", "some_type"); - for (Method method : Arrays.asList(Method.GET, Method.HEAD)) { - // Ensure we have a fresh context for each request so we don't get duplicate headers - try (ThreadContext.StoredContext ignore = verifyingClient.threadPool().getThreadContext().stashContext()) { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(method) - .withPath("/some_index/_source/id") - .withParams(params) - .build(); - dispatchRequest(request); - if (assertWarnings) { - assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); - assertWarnings = false; - } - } - } - } - public void testRestGetSourceAction() throws Exception { final BytesReference source = new BytesArray("{\"foo\": \"bar\"}"); final GetResponse response = new GetResponse( - new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, source, emptyMap(), null) + new GetResult("index1", "1", UNASSIGNED_SEQ_NO, 0, -1, true, source, emptyMap(), null) ); final RestResponse restResponse = listener.buildResponse(response); @@ -137,22 +83,18 @@ public void testRestGetSourceAction() throws Exception { } public void testRestGetSourceActionWithMissingDocument() { - final GetResponse response = new GetResponse( - new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap(), null) - ); + final GetResponse response = new GetResponse(new GetResult("index1", "1", UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap(), null)); final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response)); - assertThat(exception.getMessage(), equalTo("Document not found [index1]/[_doc]/[1]")); + assertThat(exception.getMessage(), equalTo("Document not found [index1]/[1]")); } public void testRestGetSourceActionWithMissingDocumentSource() { - final GetResponse response = new GetResponse( - new 
GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap(), null) - ); + final GetResponse response = new GetResponse(new GetResult("index1", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap(), null)); final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response)); - assertThat(exception.getMessage(), equalTo("Source not found [index1]/[_doc]/[1]")); + assertThat(exception.getMessage(), equalTo("Source not found [index1]/[1]")); } } diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestMultiGetActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestMultiGetActionTests.java deleted file mode 100644 index b157c3b070fad..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/document/RestMultiGetActionTests.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. 
See - * GitHub history for details. - */ - -package org.opensearch.rest.action.document; - -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestMultiGetActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiGetAction(Settings.EMPTY)); - } - - public void testTypeInPath() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("some_index/some_type/_mget") - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("some_index/_mget") - .build(); - dispatchRequest(validRequest); - } - - public void testTypeInBody() throws Exception { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startArray("docs") - .startObject() - .field("_index", "some_index") - .field("_type", "_doc") - .field("_id", "2") - .endObject() - .startObject() - .field("_index", "test") - .field("_id", "2") - .endObject() - .endArray() - .endObject(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("_mget") - .withContent(BytesReference.bytes(content), XContentType.JSON) - .build(); - dispatchRequest(request); - assertWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestMultiTermVectorsActionTests.java deleted file mode 100644 index fa2d580b24e89..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.document; - -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -public class RestMultiTermVectorsActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiTermVectorsAction()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/some_index/some_type/_mtermvectors") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("/some_index/_mtermvectors") - .withParams(params) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startArray("docs") - .startObject() - .field("_type", "some_type") - .field("_id", 1) - .endObject() - .endArray() - .endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("/some_index/_mtermvectors") - .withContent(BytesReference.bytes(content), XContentType.JSON) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestTermVectorsActionTests.java deleted file mode 100644 index 811c81e391acb..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/document/RestTermVectorsActionTests.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.document; - -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.io.IOException; - -public class RestTermVectorsActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestTermVectorsAction()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/some_index/some_type/some_id/_termvectors") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder().startObject().field("_type", "some_type").field("_id", 1).endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("/some_index/_termvectors/some_id") - .withContent(BytesReference.bytes(content), XContentType.JSON) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/opensearch/rest/action/document/RestUpdateActionTests.java index 5706311425e7c..bf544acf13508 100644 --- a/server/src/test/java/org/opensearch/rest/action/document/RestUpdateActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/document/RestUpdateActionTests.java @@ -38,7 +38,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.VersionType; import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.test.rest.RestActionTestCase; import org.junit.Before; @@ -59,22 +58,6 @@ public void setUpAction() { controller().registerHandler(action); } - public void testTypeInPath() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/some_index/some_type/some_id/_update") - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/some_index/_update/some_id") - .build(); - dispatchRequest(validRequest); - } - public void testUpdateDocVersion() { Map params = new HashMap<>(); if (randomBoolean()) { diff --git a/server/src/test/java/org/opensearch/rest/action/search/RestCountActionTests.java b/server/src/test/java/org/opensearch/rest/action/search/RestCountActionTests.java deleted file mode 100644 index bc1af7f41ceed..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/search/RestCountActionTests.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.search; - -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestRequest.Method; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -public class RestCountActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestCountAction()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) - .withPath("/some_index/some_type/_count") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestCountAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET) - .withPath("/some_index/_count") - .withParams(params) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestCountAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/opensearch/rest/action/search/RestExplainActionTests.java deleted file mode 100644 index 1e0668ad15262..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/search/RestExplainActionTests.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.rest.action.search; - -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestExplainActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestExplainAction()); - } - - public void testTypeInPath() { - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteVerifier((arg1, arg2) -> null); - - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/some_id/_explain") - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/_explain/some_id") - .build(); - dispatchRequest(validRequest); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/opensearch/rest/action/search/RestMultiSearchActionTests.java deleted file mode 100644 index 7bec390cd7901..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/search/RestMultiSearchActionTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.search; - -import org.opensearch.common.bytes.BytesArray; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.nio.charset.StandardCharsets; - -public class RestMultiSearchActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestMultiSearchAction(Settings.EMPTY)); - } - - public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_msearch") - .withContent(bytesContent, XContentType.JSON) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("/some_index/_msearch") - .withContent(bytesContent, XContentType.JSON) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/opensearch/rest/action/search/RestSearchActionTests.java deleted file mode 100644 index a343c93d717b9..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/search/RestSearchActionTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.search; - -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -public class RestSearchActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - controller().registerHandler(new RestSearchAction()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_search") - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. - verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index/_search") - .withParams(params) - .build(); - - // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. 
- verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index c80c01cdc1673..48c4717f664cb 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -84,6 +84,7 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.nullable; import static org.mockito.Mockito.mock; @@ -97,7 +98,6 @@ public void testPreProcess() throws Exception { when(shardSearchRequest.searchType()).thenReturn(SearchType.DEFAULT); ShardId shardId = new ShardId("index", UUID.randomUUID().toString(), 1); when(shardSearchRequest.shardId()).thenReturn(shardId); - when(shardSearchRequest.types()).thenReturn(new String[] {}); ThreadPool threadPool = new TestThreadPool(this.getClass().getName()); IndexShard indexShard = mock(IndexShard.class); @@ -123,7 +123,9 @@ public void testPreProcess() throws Exception { when(indexCache.query()).thenReturn(queryCache); when(indexService.cache()).thenReturn(indexCache); QueryShardContext queryShardContext = mock(QueryShardContext.class); - when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class))).thenReturn(queryShardContext); + when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class), anyBoolean())).thenReturn( + queryShardContext + ); MapperService mapperService = mock(MapperService.class); when(mapperService.hasNested()).thenReturn(randomBoolean()); when(indexService.mapperService()).thenReturn(mapperService); @@ -179,7 
+181,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); contextWithoutScroll.from(300); contextWithoutScroll.close(); @@ -219,7 +222,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); context1.from(300); exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false)); @@ -287,7 +291,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); SliceBuilder sliceBuilder = mock(SliceBuilder.class); @@ -324,7 +329,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false); @@ -353,7 +359,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(false); Query query1 = context4.query(); @@ -381,7 +388,9 @@ public void testClearQueryCancellationsOnClose() throws IOException { IndexService indexService = mock(IndexService.class); QueryShardContext queryShardContext = mock(QueryShardContext.class); - when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class))).thenReturn(queryShardContext); + when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class), anyBoolean())).thenReturn( + queryShardContext + ); BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); @@ -430,7 +439,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + 
Version.CURRENT, + false ); assertThat(context.searcher().hasCancellations(), is(false)); context.searcher().addQueryCancellation(() -> {}); diff --git a/server/src/test/java/org/opensearch/search/SearchHitTests.java b/server/src/test/java/org/opensearch/search/SearchHitTests.java index cee9692747d54..8307b7faa71f1 100644 --- a/server/src/test/java/org/opensearch/search/SearchHitTests.java +++ b/server/src/test/java/org/opensearch/search/SearchHitTests.java @@ -41,7 +41,6 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.document.DocumentField; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; @@ -83,7 +82,6 @@ public static SearchHit createTestItem(boolean withOptionalInnerHits, boolean wi public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) { int internalId = randomInt(); String uid = randomAlphaOfLength(10); - Text type = new Text(randomAlphaOfLengthBetween(5, 10)); NestedIdentity nestedIdentity = null; if (randomBoolean()) { nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2)); @@ -97,7 +95,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp } } - SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, documentFields, metaFields); + SearchHit hit = new SearchHit(internalId, uid, nestedIdentity, documentFields, metaFields); if (frequently()) { if (rarely()) { hit.score(Float.NaN); @@ -234,16 +232,15 @@ public void testFromXContentWithoutTypeAndId() throws IOException { } assertEquals("my_index", parsed.getIndex()); assertEquals(1, parsed.getScore(), Float.MIN_VALUE); - assertNull(parsed.getType()); assertNull(parsed.getId()); } public void testToXContent() throws IOException { - SearchHit 
searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()); searchHit.score(1.5f); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); + assertEquals("{\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); } public void testSerializeShardTarget() throws Exception { @@ -256,25 +253,25 @@ public void testSerializeShardTarget() throws Exception { ); Map innerHits = new HashMap<>(); - SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); innerHit1.shard(target); - SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); innerInnerHit2.shard(target); innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHit1.setInnerHits(innerHits); - SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); innerHit2.shard(target); - SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); innerHit3.shard(target); innerHits = new HashMap<>(); - SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit hit1 = new SearchHit(0, "_id", null, null); innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); hit1.shard(target); hit1.setInnerHits(innerHits); - SearchHit hit2 
= new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit hit2 = new SearchHit(0, "_id", null, null); hit2.shard(target); SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); @@ -301,7 +298,7 @@ public void testSerializeShardTarget() throws Exception { } public void testNullSource() { - SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null, null); + SearchHit searchHit = new SearchHit(0, "_id", null, null); assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); @@ -325,7 +322,6 @@ public void testWeirdScriptFields() throws Exception { XContentType.JSON.xContent(), "{\n" + " \"_index\": \"twitter\",\n" - + " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" @@ -346,7 +342,6 @@ public void testWeirdScriptFields() throws Exception { XContentType.JSON.xContent(), "{\n" + " \"_index\": \"twitter\",\n" - + " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" @@ -371,7 +366,6 @@ public void testWeirdScriptFields() throws Exception { JsonXContent.jsonXContent, "{\n" + " \"_index\": \"twitter\",\n" - + " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" diff --git a/server/src/test/java/org/opensearch/search/SearchHitsTests.java b/server/src/test/java/org/opensearch/search/SearchHitsTests.java index c239e87c30e38..fc5bfc90a1e34 100644 --- a/server/src/test/java/org/opensearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/opensearch/search/SearchHitsTests.java @@ -40,7 +40,6 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.lucene.LuceneTests; -import org.opensearch.common.text.Text; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.ToXContent; import 
org.opensearch.common.xcontent.XContentBuilder; @@ -248,8 +247,8 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException { public void testToXContent() throws IOException { SearchHit[] hits = new SearchHit[] { - new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()), - new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) }; + new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()) }; long totalHits = 1000; float maxScore = 1.5f; @@ -260,8 +259,8 @@ public void testToXContent() throws IOException { builder.endObject(); assertEquals( "{\"hits\":{\"total\":{\"value\":1000,\"relation\":\"eq\"},\"max_score\":1.5," - + "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":null}," - + "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":null}]}}", + + "\"hits\":[{\"_id\":\"id1\",\"_score\":null}," + + "{\"_id\":\"id2\",\"_score\":null}]}}", Strings.toString(builder) ); } @@ -269,9 +268,9 @@ public void testToXContent() throws IOException { public void testFromXContentWithShards() throws IOException { for (boolean withExplanation : new boolean[] { true, false }) { final SearchHit[] hits = new SearchHit[] { - new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()), - new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()), - new SearchHit(10, "id10", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) }; + new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(10, "id10", Collections.emptyMap(), Collections.emptyMap()) }; for (SearchHit hit : hits) { String index = randomAlphaOfLengthBetween(5, 10); diff --git a/server/src/test/java/org/opensearch/search/SearchModuleTests.java 
b/server/src/test/java/org/opensearch/search/SearchModuleTests.java index 19b61275b8f62..05d4153949f9a 100644 --- a/server/src/test/java/org/opensearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/opensearch/search/SearchModuleTests.java @@ -459,7 +459,6 @@ public List> getRescorers() { "term", "terms", "terms_set", - "type", "wildcard", "wrapper", "distance_feature" }; diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 5575e27619fa9..4e342875e4599 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -222,7 +222,7 @@ protected Settings nodeSettings() { public void testClearOnClose() { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -234,7 +234,7 @@ public void testClearOnClose() { public void testClearOnStop() { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -246,7 +246,7 @@ public void testClearOnStop() { public void testClearIndexDelete() { createIndex("index"); - client().prepareIndex("index", "type", 
"1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); SearchService service = getInstanceFromNode(SearchService.class); @@ -259,7 +259,7 @@ public void testClearIndexDelete() { public void testCloseSearchContextOnRewriteException() { // if refresh happens while checking the exception, the subsequent reference count might not match, so we switch it off createIndex("index", Settings.builder().put("index.refresh_interval", -1).build()); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchService service = getInstanceFromNode(SearchService.class); IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -278,7 +278,7 @@ public void testCloseSearchContextOnRewriteException() { public void testSearchWhileIndexDeleted() throws InterruptedException { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchService service = getInstanceFromNode(SearchService.class); IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -302,7 +302,7 @@ public void run() { } catch (InterruptedException e) { throw new AssertionError(e); } - client().prepareIndex("index", "type") + client().prepareIndex("index") .setSource("field", "value") .setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())) .execute(new ActionListener() { @@ -387,7 +387,7 @@ public void onFailure(Exception e) { public void 
testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws ExecutionException, InterruptedException { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index")); @@ -633,7 +633,7 @@ public void testIgnoreScriptfieldIfSizeZero() throws IOException { */ public void testMaxOpenScrollContexts() throws Exception { createIndex("index"); - client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); final SearchService service = getInstanceFromNode(SearchService.class); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -958,7 +958,7 @@ public void testCanMatch() throws Exception { ).canMatch() ); // the source can match and can be rewritten to a match_none, but not the alias filter - final IndexResponse response = client().prepareIndex("index", "_doc", "1").setSource("id", "1").get(); + final IndexResponse response = client().prepareIndex("index").setId("1").setSource("id", "1").get(); assertEquals(RestStatus.CREATED, response.status()); searchRequest.indices("alias").source(new SearchSourceBuilder().query(new TermQueryBuilder("id", "1"))); assertFalse( @@ -1050,7 +1050,7 @@ public void testSetSearchThrottled() { final SearchService service = getInstanceFromNode(SearchService.class); Index index = resolveIndex("throttled_threadpool_index"); assertTrue(service.getIndicesService().indexServiceSafe(index).getIndexSettings().isSearchThrottled()); - client().prepareIndex("throttled_threadpool_index", "_doc", "1").setSource("field", 
"value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("throttled_threadpool_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); SearchResponse searchResponse = client().prepareSearch("throttled_threadpool_index") .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) .setSize(1) @@ -1104,7 +1104,7 @@ public void testExpandSearchThrottled() { ) ).actionGet(); - client().prepareIndex("throttled_threadpool_index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("throttled_threadpool_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); assertHitCount(client().prepareSearch().get(), 1L); assertHitCount(client().prepareSearch().setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED).get(), 1L); } @@ -1116,7 +1116,7 @@ public void testExpandSearchFrozen() { new InternalOrPrivateSettingsPlugin.UpdateInternalOrPrivateAction.Request("frozen_index", "index.frozen", "true") ).actionGet(); - client().prepareIndex("frozen_index", "_doc", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("frozen_index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); assertHitCount(client().prepareSearch().get(), 0L); assertHitCount(client().prepareSearch().setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED).get(), 1L); } @@ -1180,7 +1180,7 @@ public void testCreateSearchContextFailure() throws Exception { final IndexService indexService = createIndex(index); final SearchService service = getInstanceFromNode(SearchService.class); final ShardId shardId = new ShardId(indexService.index(), 0); - final ShardSearchRequest request = new ShardSearchRequest(shardId, new String[0], 0, null) { + final ShardSearchRequest request = new ShardSearchRequest(shardId, 0, null) { @Override public SearchType searchType() { // induce an artificial NPE @@ -1315,7 +1315,7 @@ public void 
testDeleteIndexWhileSearch() throws Exception { createIndex("test"); int numDocs = randomIntBetween(1, 20); for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "_doc").setSource("f", "v").get(); + client().prepareIndex("test").setSource("f", "v").get(); } client().admin().indices().prepareRefresh("test").get(); AtomicBoolean stopped = new AtomicBoolean(false); diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java index c392e588319af..c48e99e16db2e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationCollectorTests.java @@ -44,7 +44,7 @@ public class AggregationCollectorTests extends OpenSearchSingleNodeTestCase { public void testNeedsScores() throws Exception { IndexService index = createIndex("idx"); - client().prepareIndex("idx", "type", "1").setSource("f", 5).execute().get(); + client().prepareIndex("idx").setId("1").setSource("f", 5).execute().get(); client().admin().indices().prepareRefresh("idx").get(); // simple field aggregation, no scores needed diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index d48a1139d8678..bf16f70d400fc 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -102,11 +102,11 @@ protected void indexData() throws Exception { indexRandom(true, docs); - SearchResponse resp = client().prepareSearch("idx").setTypes("type").setRouting(routing1).setQuery(matchAllQuery()).get(); + SearchResponse resp = client().prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()).get(); 
assertSearchResponse(resp); long totalOnOne = resp.getHits().getTotalHits().value; assertThat(totalOnOne, is(15L)); - resp = client().prepareSearch("idx").setTypes("type").setRouting(routing2).setQuery(matchAllQuery()).get(); + resp = client().prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()).get(); assertSearchResponse(resp); long totalOnTwo = resp.getHits().getTotalHits().value; assertThat(totalOnTwo, is(12L)); @@ -115,7 +115,7 @@ protected void indexData() throws Exception { protected List indexDoc(String shard, String key, int times) throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[times]; for (int i = 0; i < times; i++) { - builders[i] = client().prepareIndex("idx", "type") + builders[i] = client().prepareIndex("idx") .setRouting(shard) .setSource(jsonBuilder().startObject().field("key", key).field("value", 1).endObject()); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java index c1e5c69fbb974..9243a1ccd517e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -137,7 +137,7 @@ public void setupSuiteScopeCluster() throws Exception { multiVal[0] = multiValues[i % numUniqueGeoPoints]; multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints]; builders.add( - client().prepareIndex(IDX_NAME, "type") + client().prepareIndex(IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) @@ -193,7 +193,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 5; i++) { builders.add( - client().prepareIndex(DATELINE_IDX_NAME, "type") + client().prepareIndex(DATELINE_IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, 
geoValues[i].lon(), geoValues[i].lat()) @@ -221,7 +221,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 2000; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; builders.add( - client().prepareIndex(HIGH_CARD_IDX_NAME, "type") + client().prepareIndex(HIGH_CARD_IDX_NAME) .setSource( jsonBuilder().startObject() .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) @@ -244,7 +244,7 @@ public void setupSuiteScopeCluster() throws Exception { } builders.add( - client().prepareIndex(IDX_ZERO_NAME, "type") + client().prepareIndex(IDX_ZERO_NAME) .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject()) ); assertAcked(prepareCreate(IDX_ZERO_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point")); @@ -269,7 +269,6 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < totalHits; i++) { SearchHit searchHit = response.getHits().getAt(i); assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); - assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getType(), equalTo("type")); DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME); assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java index 20807f4330bbd..07037be28a19f 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -45,7 +45,6 @@ import org.opensearch.common.collect.Tuple; import org.opensearch.common.document.DocumentField; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; -import org.opensearch.common.text.Text; import 
org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentHelper; @@ -174,7 +173,7 @@ private InternalTopHits createTestInstance( Map searchHitFields = new HashMap<>(); scoreDocs[i] = docBuilder.apply(docId, score); - hits[i] = new SearchHit(docId, Integer.toString(i), new Text("_docs"), searchHitFields, Collections.emptyMap()); + hits[i] = new SearchHit(docId, Integer.toString(i), searchHitFields, Collections.emptyMap()); hits[i].score(score); } int totalHits = between(actualSize, 500000); @@ -224,7 +223,6 @@ protected void assertFromXContent(InternalTopHits aggregation, ParsedAggregation SearchHit actual = actualHits.get(i); assertEquals(expected.getIndex(), actual.getIndex()); - assertEquals(expected.getType(), actual.getType()); assertEquals(expected.getId(), actual.getId()); assertEquals(expected.getVersion(), actual.getVersion()); assertEquals(expected.getScore(), actual.getScore(), 0.0f); diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java index 2f48447c931b5..08a257935eb9e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java @@ -208,31 +208,25 @@ public void testSummationAccuracy() throws IOException { } private void verifySummationOfDoubles(double[] values, double expected, double delta) throws IOException { - testAggregation( - sum("_name").field(FIELD_NAME), - new MatchAllDocsQuery(), - iw -> { - /* - * The sum agg uses a Kahan sumation on the shard to limit - * floating point errors. But it doesn't ship the sums to the - * coordinating node, so floaing point error can creep in when - * reducing many sums. 
The test framework aggregates each - * segment as though it were a separate shard, then reduces - * those togther. Fun. But it means we don't get the full - * accuracy of the Kahan sumation. And *that* accuracy is - * what this method is trying to test. So we have to stick - * all the documents on the same leaf. `addDocuments` does - * that. - */ - iw.addDocuments( - Arrays.stream(values) - .mapToObj(value -> singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))) - .collect(toList()) - ); - }, - result -> assertEquals(expected, result.getValue(), delta), - defaultFieldType(NumberType.DOUBLE) - ); + testAggregation(sum("_name").field(FIELD_NAME), new MatchAllDocsQuery(), iw -> { + /* + * The sum agg uses a Kahan sumation on the shard to limit + * floating point errors. But it doesn't ship the sums to the + * coordinating node, so floaing point error can creep in when + * reducing many sums. The test framework aggregates each + * segment as though it were a separate shard, then reduces + * those togther. Fun. But it means we don't get the full + * accuracy of the Kahan sumation. And *that* accuracy is + * what this method is trying to test. So we have to stick + * all the documents on the same leaf. `addDocuments` does + * that. 
+ */ + iw.addDocuments( + Arrays.stream(values) + .mapToObj(value -> singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))) + .collect(toList()) + ); + }, result -> assertEquals(expected, result.getValue(), delta), defaultFieldType(NumberType.DOUBLE)); } public void testUnmapped() throws IOException { diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 89911f610ec4b..a8f9383c07125 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -98,11 +98,8 @@ public void testTopLevel() throws Exception { SearchHits searchHits = ((TopHits) result).getHits(); assertEquals(3L, searchHits.getTotalHits().value); assertEquals("3", searchHits.getAt(0).getId()); - assertEquals("type", searchHits.getAt(0).getType()); assertEquals("2", searchHits.getAt(1).getId()); - assertEquals("type", searchHits.getAt(1).getType()); assertEquals("1", searchHits.getAt(2).getId()); - assertEquals("type", searchHits.getAt(2).getType()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/ScriptValuesTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/ScriptValuesTests.java index 3709f4daefaca..98e4e8f881b1b 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/ScriptValuesTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/ScriptValuesTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorable; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.Strings; import org.opensearch.script.AggregationScript; import 
org.opensearch.search.aggregations.support.values.ScriptBytesValues; import org.opensearch.search.aggregations.support.values.ScriptDoubleValues; @@ -60,7 +59,7 @@ private static class FakeAggregationScript extends AggregationScript { int index; FakeAggregationScript(Object[][] values) { - super(Collections.emptyMap(), new SearchLookup(null, null, Strings.EMPTY_ARRAY) { + super(Collections.emptyMap(), new SearchLookup(null, null) { @Override public LeafSearchLookup getLeafSearchLookup(LeafReaderContext context) { diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java index c28f980360a40..f866d817a7c43 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -49,7 +49,7 @@ public class ValuesSourceConfigTests extends OpenSearchSingleNodeTestCase { public void testKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bytes", "type=keyword"); - client().prepareIndex("index", "type", "1").setSource("bytes", "abc").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("bytes", "abc").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -75,7 +75,7 @@ public void testKeyword() throws Exception { public void testEmptyKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bytes", "type=keyword"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + 
client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -106,7 +106,7 @@ public void testEmptyKeyword() throws Exception { public void testUnmappedKeyword() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -136,7 +136,7 @@ public void testUnmappedKeyword() throws Exception { public void testLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "long", "type=long"); - client().prepareIndex("index", "type", "1").setSource("long", 42).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("long", 42).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -162,7 +162,7 @@ public void testLong() throws Exception { public void testEmptyLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "long", "type=long"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try 
(Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -193,7 +193,7 @@ public void testEmptyLong() throws Exception { public void testUnmappedLong() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -224,7 +224,7 @@ public void testUnmappedLong() throws Exception { public void testBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bool", "type=boolean"); - client().prepareIndex("index", "type", "1").setSource("bool", true).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource("bool", true).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -250,7 +250,7 @@ public void testBoolean() throws Exception { public void testEmptyBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "bool", "type=boolean"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = 
indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -281,7 +281,7 @@ public void testEmptyBoolean() throws Exception { public void testUnmappedBoolean() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - client().prepareIndex("index", "type", "1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("index").setId("1").setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); @@ -331,10 +331,7 @@ public void testTypeFieldDeprecation() { public void testFieldAlias() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "field", "type=keyword", "alias", "type=alias,path=field"); - client().prepareIndex("index", "type", "1") - .setSource("field", "value") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); diff --git a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java index 59519226685e2..66b18bc42ad50 100644 --- a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java @@ -118,6 +118,46 @@ public void testSerialization() throws IOException { } } + public void testSerializationWithPercentilesQueryObject() throws IOException { + String restContent = "{\n" + + " \"aggregations\": {" + + " 
\"percentiles_duration\": {\n" + + " \"percentiles\" : {\n" + + " \"field\": \"duration\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + String expectedContent = "{\"aggregations\":{" + + "\"percentiles_duration\":{" + + "\"percentiles\":{" + + "\"field\":\"duration\"," + + "\"percents\":[1.0,5.0,25.0,50.0,75.0,95.0,99.0]," + + "\"keyed\":true," + + "\"tdigest\":{" + + "\"compression\":100.0" + + "}" + + "}" + + "}" + + "}}"; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + searchSourceBuilder.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + SearchSourceBuilder deserializedBuilder = new SearchSourceBuilder(in); + String actualContent = deserializedBuilder.toString(); + + assertEquals(expectedContent, actualContent); + assertEquals(searchSourceBuilder.hashCode(), deserializedBuilder.hashCode()); + assertNotSame(searchSourceBuilder, deserializedBuilder); + } + } + } + } + public void testShallowCopy() { for (int i = 0; i < 10; i++) { SearchSourceBuilder original = createSearchSourceBuilder(); diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/FetchSourcePhaseTests.java index b74aa0e683eaf..92b398f3030e1 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -190,7 +190,7 @@ private HitContext hitExecuteMultiple( when(fetchContext.fetchSourceContext()).thenReturn(fetchSourceContext); when(fetchContext.getIndexName()).thenReturn("index"); - final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null); + final SearchHit searchHit = 
new SearchHit(1, null, nestedIdentity, null, null); // We don't need a real index, just a LeafReaderContext which cannot be mocked. MemoryIndex index = new MemoryIndex(); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java index 190cf677c10c5..b46ac4a72952b 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java @@ -85,12 +85,12 @@ public void testNullShape() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("aNullshape") .setSource("{\"geo\": null}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE) .get(); - GetResponse result = client().prepareGet(defaultIndexName, "_doc", "aNullshape").get(); + GetResponse result = client().prepareGet(defaultIndexName, "aNullshape").get(); assertThat(result.getField("location"), nullValue()); } @@ -99,13 +99,13 @@ public void testIndexPointsFilterRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -137,13 +137,13 @@ public void testIndexPointsCircle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - 
client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -170,13 +170,13 @@ public void testIndexPointsPolygon() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -206,19 +206,19 @@ public void testIndexPointsMultiPolygon() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-40 -40)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") 
.setSource(jsonBuilder().startObject().field("name", "Document 3").field(defaultGeoFieldName, "POINT(-50 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -260,13 +260,13 @@ public void testIndexPointsRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field("name", "Document 1").field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field("name", "Document 2").field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -288,13 +288,13 @@ public void testIndexPointsIndexedRectangle() throws Exception { client().admin().indices().prepareCreate(defaultIndexName).addMapping(defaultIndexName, xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("point1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-30 -30)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("point2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-45 -50)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -313,13 +313,13 @@ public void testIndexPointsIndexedRectangle() throws Exception { client().admin().indices().prepareCreate(indexedShapeIndex).addMapping(defaultIndexName, xcb).get(); ensureGreen(); - client().prepareIndex(indexedShapeIndex, "_doc") + client().prepareIndex(indexedShapeIndex) .setId("shape1") .setSource(jsonBuilder().startObject().field(indexedShapePath, "BBOX(-50, -40, -45, -55)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - 
client().prepareIndex(indexedShapeIndex, "_doc") + client().prepareIndex(indexedShapeIndex) .setId("shape2") .setSource(jsonBuilder().startObject().field(indexedShapePath, "BBOX(-60, -50, -50, -60)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -355,19 +355,19 @@ public void testRectangleSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 0)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 0)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 0)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -388,25 +388,25 @@ public void testPolygonSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(179 7)").endObject()) 
.setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("4") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 7)").endObject()) .setRefreshPolicy(IMMEDIATE) @@ -432,19 +432,19 @@ public void testMultiPolygonSpanningDateline() throws Exception { client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); ensureGreen(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("1") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-169 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("2") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(-179 7)").endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex(defaultIndexName, "_doc") + client().prepareIndex(defaultIndexName) .setId("3") .setSource(jsonBuilder().startObject().field(defaultGeoFieldName, "POINT(171 7)").endObject()) .setRefreshPolicy(IMMEDIATE) diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index 0b925811161c2..1722cb564e231 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -152,14 +152,16 @@ public void testShapeFetchingPath() throws Exception { String location = "\"geo\" : {\"type\":\"polygon\", \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}"; - client().prepareIndex("shapes", "type", "1") + client().prepareIndex("shapes") + .setId("1") .setSource( String.format(Locale.ROOT, "{ %s, \"1\" : { %s, \"2\" : { %s, \"3\" : { %s } }} }", location, location, location, location), XContentType.JSON ) .setRefreshPolicy(IMMEDIATE) .get(); - 
client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .startObject("geo") @@ -264,7 +266,7 @@ public void testRandomGeoCollectionQuery() throws Exception { ensureGreen(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "_doc").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); // Create a random geometry collection to query GeometryCollectionBuilder queryCollection = RandomShapeGenerator.createGeometryCollection(random()); @@ -430,19 +432,18 @@ public void testGeometryCollectionRelations() throws Exception { public void testEdgeCases() throws Exception { XContentBuilder xcb = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") .endObject() .endObject() - .endObject() .endObject(); String mapping = Strings.toString(xcb); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); - client().prepareIndex("test", "type1", "blakely") + client().prepareIndex("test") + .setId("blakely") .setSource( jsonBuilder().startObject() .field("name", "Blakely Island") @@ -478,7 +479,7 @@ public void testEdgeCases() throws Exception { // This search would fail if both geoshape indexing and geoshape filtering // used the bottom-level optimization in SpatialPrefixTree#recursiveGetNodes. 
- SearchResponse searchResponse = client().prepareSearch("test").setTypes("type1").setQuery(geoIntersectionQuery("geo", query)).get(); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoIntersectionQuery("geo", query)).get(); assertSearchResponse(searchResponse); assertHitCount(searchResponse, 1); @@ -493,7 +494,8 @@ public void testIndexedShapeReferenceSourceDisabled() throws Exception { EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45)); - client().prepareIndex("shapes", "shape_type", "Big_Rectangle") + client().prepareIndex("shapes") + .setId("Big_Rectangle") .setSource(jsonBuilder().startObject().field("shape", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -546,7 +548,7 @@ public void testPointQuery() throws Exception { .actionGet(); } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("geo", pb); geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); @@ -587,7 +589,7 @@ public void testContainsShapeQuery() throws Exception { } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); // index the mbr of the collection EnvelopeBuilder env = new EnvelopeBuilder( @@ -595,7 +597,7 @@ public void testContainsShapeQuery() throws Exception { new Coordinate(mbr.getMaxX(), mbr.getMinY()) ); docSource = env.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", 
"2").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("2").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1))); GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("geo", filterShape).relation(ShapeRelation.CONTAINS); @@ -613,7 +615,7 @@ public void testExistsQuery() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type", builder).execute().actionGet(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ExistsQueryBuilder eqb = QueryBuilders.existsQuery("geo"); SearchResponse result = client().prepareSearch("test").setQuery(eqb).get(); @@ -625,7 +627,6 @@ public void testPointsOnly() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -636,15 +637,15 @@ public void testPointsOnly() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); ShapeBuilder shape = RandomShapeGenerator.createShape(random()); try { - client().prepareIndex("geo_points_only", "type1", "1") + client().prepareIndex("geo_points_only") + .setId("1") .setSource(jsonBuilder().startObject().field("location", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -655,10 +656,7 @@ public void testPointsOnly() throws Exception { } // test that point was inserted - SearchResponse response = 
client().prepareSearch("geo_points_only") - .setTypes("type1") - .setQuery(geoIntersectionQuery("location", shape)) - .get(); + SearchResponse response = client().prepareSearch("geo_points_only").setQuery(geoIntersectionQuery("location", shape)).get(); assertHitCount(response, 1); } @@ -667,7 +665,6 @@ public void testPointsOnlyExplicit() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") @@ -678,45 +675,48 @@ public void testPointsOnlyExplicit() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); // MULTIPOINT ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); - client().prepareIndex("geo_points_only", "type1", "1") + client().prepareIndex("geo_points_only") + .setId("1") .setSource(jsonBuilder().startObject().field("geo", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); // POINT shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POINT); - client().prepareIndex("geo_points_only", "type1", "2") + client().prepareIndex("geo_points_only") + .setId("2") .setSource(jsonBuilder().startObject().field("geo", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); // test that point was inserted - SearchResponse response = client().prepareSearch("geo_points_only").setTypes("type1").setQuery(matchAllQuery()).get(); + SearchResponse response = client().prepareSearch("geo_points_only").setQuery(matchAllQuery()).get(); assertHitCount(response, 2); } public void testIndexedShapeReference() throws Exception { String mapping = Strings.toString(createDefaultMapping()); - 
client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); createIndex("shapes"); ensureGreen(); EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45)); - client().prepareIndex("shapes", "shape_type", "Big_Rectangle") + client().prepareIndex("shapes") + .setId("Big_Rectangle") .setSource(jsonBuilder().startObject().field("shape", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - client().prepareIndex("test", "type1", "1") + client().prepareIndex("test") + .setId("1") .setSource( jsonBuilder().startObject() .field("name", "Document 1") @@ -765,7 +765,8 @@ public void testFieldAlias() throws IOException { createIndex("test", Settings.EMPTY, "type", mapping); ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); - client().prepareIndex("test", "type", "1") + client().prepareIndex("test") + .setId("1") .setSource(jsonBuilder().startObject().field("location", shape).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); @@ -788,7 +789,7 @@ public void testQueryRandomGeoCollection() throws Exception { client().admin().indices().prepareCreate("test").addMapping("type", builder).get(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = (gcb.getShapeAt(gcb.numShapes() - 1)); @@ -835,7 +836,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { .endArray() .endObject() .endObject(); - client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + 
client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery( "geo", @@ -900,7 +901,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { public void testDistanceQuery() throws Exception { String mapping = Strings.toString(createRandomMapping()); - client().admin().indices().prepareCreate("test_distance").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test_distance").setMapping(mapping).get(); ensureGreen(); CircleBuilder circleBuilder = new CircleBuilder().center(new Coordinate(1, 0)).radius(350, DistanceUnit.KILOMETERS); @@ -943,13 +944,13 @@ public void testDistanceQuery() throws Exception { public void testIndexRectangleSpanningDateLine() throws Exception { String mapping = Strings.toString(createRandomMapping()); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(178, 10), new Coordinate(-178, -10)); XContentBuilder docSource = envelopeBuilder.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); - client().prepareIndex("test", "type1", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ShapeBuilder filterShape = new PointBuilder(179, 0); diff --git a/server/src/test/java/org/opensearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/opensearch/search/lookup/LeafDocLookupTests.java index 96fdf6a6da241..b047f16583ee5 100644 --- a/server/src/test/java/org/opensearch/search/lookup/LeafDocLookupTests.java +++ b/server/src/test/java/org/opensearch/search/lookup/LeafDocLookupTests.java @@ -39,7 +39,6 @@ import 
org.opensearch.test.OpenSearchTestCase; import org.junit.Before; -import static org.opensearch.search.lookup.LeafDocLookup.TYPES_DEPRECATION_MESSAGE; import static org.mockito.AdditionalAnswers.returnsFirstArg; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doReturn; @@ -59,14 +58,13 @@ public void setUp() throws Exception { when(fieldType.valueForDisplay(any())).then(returnsFirstArg()); MapperService mapperService = mock(MapperService.class); - when(mapperService.fieldType("_type")).thenReturn(fieldType); when(mapperService.fieldType("field")).thenReturn(fieldType); when(mapperService.fieldType("alias")).thenReturn(fieldType); docValues = mock(ScriptDocValues.class); IndexFieldData fieldData = createFieldData(docValues); - docLookup = new LeafDocLookup(mapperService, ignored -> fieldData, new String[] { "type" }, null); + docLookup = new LeafDocLookup(mapperService, ignored -> fieldData, null); } public void testBasicLookup() { @@ -79,12 +77,6 @@ public void testFieldAliases() { assertEquals(docValues, fetchedDocValues); } - public void testTypesDeprecation() { - ScriptDocValues fetchedDocValues = docLookup.get("_type"); - assertEquals(docValues, fetchedDocValues); - assertWarnings(TYPES_DEPRECATION_MESSAGE); - } - private IndexFieldData createFieldData(ScriptDocValues scriptDocValues) { LeafFieldData leafFieldData = mock(LeafFieldData.class); doReturn(scriptDocValues).when(leafFieldData).getScriptValues(); diff --git a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java index 08c1d027385ea..cc7200bbf78c4 100644 --- a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java @@ -90,7 +90,7 @@ public void setUp() throws Exception { return null; }).when(leafReader).document(anyInt(), any(StoredFieldVisitor.class)); - fieldsLookup = new 
LeafFieldsLookup(mapperService, new String[] { "type" }, leafReader); + fieldsLookup = new LeafFieldsLookup(mapperService, leafReader); } public void testBasicLookup() { diff --git a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java index c80ce807bf736..e1002e114822e 100644 --- a/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java +++ b/server/src/test/java/org/opensearch/search/query/ScriptScoreQueryTests.java @@ -110,6 +110,34 @@ public void testExplain() throws IOException { assertThat(explanation.getValue(), equalTo(1.0)); } + public void testExplainWithName() throws IOException { + Script script = new Script("script using explain"); + ScoreScript.LeafFactory factory = newFactory(script, true, explanation -> { + assertNotNull(explanation); + explanation.set("this explains the score"); + return 1.0; + }); + + ScriptScoreQuery query = new ScriptScoreQuery( + Queries.newMatchAllQuery(), + "query1", + script, + factory, + null, + "index", + 0, + Version.CURRENT + ); + Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f); + Explanation explanation = weight.explain(leafReaderContext, 0); + assertNotNull(explanation); + assertThat(explanation.getDescription(), equalTo("this explains the score")); + assertThat(explanation.getValue(), equalTo(1.0)); + + assertThat(explanation.getDetails(), arrayWithSize(1)); + assertThat(explanation.getDetails()[0].getDescription(), equalTo("*:* (_name: query1)")); + } + public void testExplainDefault() throws IOException { Script script = new Script("script without setting explanation"); ScoreScript.LeafFactory factory = newFactory(script, true, explanation -> 1.5); diff --git a/server/src/test/java/org/opensearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/opensearch/search/sort/GeoDistanceSortBuilderTests.java index e8d300e1eb9e3..c14deb6add083 100644 --- 
a/server/src/test/java/org/opensearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/sort/GeoDistanceSortBuilderTests.java @@ -268,7 +268,6 @@ public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { + " \"nested\" : {\n" + " \"filter\" : {\n" + " \"ids\" : {\n" - + " \"type\" : [ ],\n" + " \"values\" : [ ],\n" + " \"boost\" : 5.711116\n" + " }\n" diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java index bbc9a0fdbe309..b2c54492b66d7 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java @@ -100,7 +100,6 @@ public void testIndexingWithNoContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -153,7 +152,6 @@ public void testIndexingWithSimpleContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -201,7 +199,6 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -249,7 +246,6 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -307,7 +303,7 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { Exception e = expectThrows( MapperParsingException.class, - () -> defaultMapper.parse(new SourceToParse("test", 
"type1", "1", BytesReference.bytes(builder), XContentType.JSON)) + () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)) ); assertEquals( "contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]", @@ -341,7 +337,6 @@ public void testIndexingWithContextList() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -387,7 +382,6 @@ public void testIndexingWithMixedTypeContextList() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -441,7 +435,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { Exception e = expectThrows( MapperParsingException.class, - () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)) + () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)) ); assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage()); } @@ -486,7 +480,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject(); ParsedDocument parsedDocument = defaultMapper.parse( - new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON) + new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON) ); IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java index b238bee976a96..31cc2e73ff2a3 100644 --- 
a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java @@ -64,7 +64,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -76,16 +75,14 @@ public void testIndexingWithNoContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -114,7 +111,6 @@ public void testIndexingWithNoContexts() throws Exception { public void testIndexingWithSimpleContexts() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -126,16 +122,14 @@ public void testIndexingWithSimpleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", 
"1", BytesReference.bytes( jsonBuilder().startObject() @@ -162,7 +156,6 @@ public void testIndexingWithSimpleContexts() throws Exception { public void testIndexingWithContextList() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -174,16 +167,14 @@ public void testIndexingWithContextList() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -214,7 +205,6 @@ public void testIndexingWithContextList() throws Exception { public void testIndexingWithMultipleContexts() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -230,10 +220,9 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); XContentBuilder builder = jsonBuilder().startObject() .startArray("completion") @@ -248,7 +237,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject(); ParsedDocument parsedDocument = 
mapperService.documentMapper() - .parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); + .parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 45139cedeeb37..c2c2728a348f5 100644 --- a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -73,7 +73,6 @@ public static void aggregateAndCheckFromSeveralShards(OpenSearchIntegTestCase te private static void checkSignificantTermsAggregationCorrect(OpenSearchIntegTestCase testCase) { SearchResponse response = client().prepareSearch(INDEX_NAME) - .setTypes(DOC_TYPE) .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))) .execute() .actionGet(); @@ -105,13 +104,13 @@ public static void index01Docs(String type, String settings, OpenSearchIntegTest ); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); - 
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5").setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6").setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2").setSource(TEXT_FIELD, "1", CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("5").setSource(TEXT_FIELD, gb, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("6").setSource(TEXT_FIELD, gb, CLASS_FIELD, "0")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("7").setSource(TEXT_FIELD, "0", CLASS_FIELD, "0")); testCase.indexRandom(true, false, indexRequestBuilderList); } } diff --git a/server/src/test/resources/org/opensearch/action/bulk/bulk-log.json b/server/src/test/resources/org/opensearch/action/bulk/bulk-log.json index 9c3663c3f63bc..05fccca8ca91d 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/bulk-log.json +++ b/server/src/test/resources/org/opensearch/action/bulk/bulk-log.json @@ -1,24 +1,24 @@ -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} 
{"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug2/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} 
{"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug2/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} 
{"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug2/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} 
{"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} -{"index":{"_index":"logstash-2014.03.30","_type":"logs"}} +{"index":{"_index":"logstash-2014.03.30"}} {"message":"in24.inetnebr.com--[01/Aug2/1995:00:00:01-0400]\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"2001839","@version":"1","@timestamp":"2014-03-30T12:38:10.048Z","host":["romeo","in24.inetnebr.com"],"monthday":1,"month":8,"year":1995,"time":"00:00:01","tz":"-0400","request":"\"GET/shuttle/missions/sts-68/news/sts-68-mcc-05.txtHTTP/1.0\"","httpresponse":"200","size":1839,"rtime":"1995-08-01T00:00:01.000Z"} diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk.json index cf76477187524..e36d1b7fc00b8 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk.json @@ -1,5 +1,5 @@ -{ "index":{"_index":"test","_type":"type1","_id":"1"} } +{ "index":{"_index":"test","_id":"1"} } { "field1" : "value1" } -{ "delete" 
: { "_index" : "test", "_type" : "type1", "_id" : "2" } } -{ "create" : { "_index" : "test", "_type" : "type1", "_id" : "3" } } +{ "delete" : { "_index" : "test", "_id" : "2" } } +{ "create" : { "_index" : "test", "_id" : "3" } } { "field1" : "value3" } diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk10.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk10.json index 3556dc261b037..7721d6f073fbd 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk10.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk10.json @@ -1,15 +1,15 @@ -{ "index" : {"_index":null, "_type":"type1", "_id":"0"} } +{ "index" : {"_index":null, "_id":"0"} } { "field1" : "value1" } -{ "index" : {"_index":"test", "_type":null, "_id":"0"} } +{ "index" : {"_index":"test", "_id":"0"} } { "field1" : "value1" } -{ "index" : {"_index":"test", "_type":"type1", "_id":null} } +{ "index" : {"_index":"test", "_id":null} } { "field1" : "value1" } -{ "delete" : {"_index":null, "_type":"type1", "_id":"0"} } -{ "delete" : {"_index":"test", "_type":null, "_id":"0"} } -{ "delete" : {"_index":"test", "_type":"type1", "_id":null} } -{ "create" : {"_index":null, "_type":"type1", "_id":"0"} } +{ "delete" : {"_index":null, "_id":"0"} } +{ "delete" : {"_index":"test", "_id":"0"} } +{ "delete" : {"_index":"test", "_id":null} } +{ "create" : {"_index":null, "_id":"0"} } { "field1" : "value1" } -{ "create" : {"_index":"test", "_type":null, "_id":"0"} } +{ "create" : {"_index":"test", "_id":"0"} } { "field1" : "value1" } -{ "create" : {"_index":"test", "_type":"type1", "_id":null} } +{ "create" : {"_index":"test", "_id":null} } { "field1" : "value1" } diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk11.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk11.json index 9be3c13061234..2242dd01c8145 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk11.json +++ 
b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk11.json @@ -1,5 +1,5 @@ -{ "index":{"_index":"test","_type":"type1","_id":"1"} } +{ "index":{"_index":"test","_id":"1"} } { "field1" : "value1" } -{ "delete" : { "_index" : "test", "_type" : "type1", "_id" : "2" } } -{ "create" : { "_index" : "test", "_type" : "type1", "_id" : "3" } } +{ "delete" : { "_index" : "test", "_id" : "2" } } +{ "create" : { "_index" : "test", "_id" : "3" } } { "field1" : "value3" } \ No newline at end of file diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk4.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk4.json index 94d95614568ca..e1911094e7d88 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk4.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk4.json @@ -1,6 +1,6 @@ { "update" : {"_id" : "1", "retry_on_conflict" : 2} } { "doc" : {"field" : "value"} } -{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1" } } +{ "update" : { "_id" : "0", "_index" : "index1" } } { "script" : { "source" : "counter += param1", "lang" : "javascript", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "delete" : { "_id" : "2" } } { "create" : { "_id" : "3" } } diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk5.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk5.json index 6ad5ff3052f25..9e04df1af69ee 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk5.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk5.json @@ -1,5 +1,5 @@ -{ "index": {"_type": "type1","_id": "1"} } +{ "index": { "_id": "1"} } { "field1" : "value1" } -{ "delete" : { "_type" : "type1", "_id" : "2" } } -{ "create" : { "_type" : "type1", "_id" : "3" } } +{ "delete" : { "_id" : "2" } } +{ "create" : { "_id" : "3" } } { "field1" : "value3" } diff --git 
a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk6.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk6.json index e9c97965595eb..86e8757af832d 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk6.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk6.json @@ -1,6 +1,6 @@ -{"index": {"_index": "test", "_type": "doc", "_source": {"hello": "world"}, "_id": 0}} +{"index": {"_index": "test", "_source": {"hello": "world"}, "_id": 0}} {"field1": "value0"} -{"index": {"_index": "test", "_type": "doc", "_id": 1}} +{"index": {"_index": "test", "_id": 1}} {"field1": "value1"} -{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"index": {"_index": "test", "_id": 2}} {"field1": "value2"} diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk7.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk7.json index 669bfd10798e9..cd742def27e9f 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk7.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk7.json @@ -1,6 +1,6 @@ -{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"index": {"_index": "test", "_id": 0}} {"field1": "value0"} -{"index": {"_index": "test", "_type": "doc", "_id": 1}} +{"index": {"_index": "test", "_id": 1}} {"field1": "value1"} -{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unknown": ["foo", "bar"]}} +{"index": {"_index": "test", "_id": 2, "_unknown": ["foo", "bar"]}} {"field1": "value2"} diff --git a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk8.json b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk8.json index c1a94b1d159d0..27d855258ed72 100644 --- a/server/src/test/resources/org/opensearch/action/bulk/simple-bulk8.json +++ b/server/src/test/resources/org/opensearch/action/bulk/simple-bulk8.json @@ -1,6 +1,6 @@ -{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"index": 
{"_index": "test", "_id": 0}} {"field1": "value0"} -{"index": {"_index": "test", "_type": "doc", "_id": 1, "_foo": "bar"}} +{"index": {"_index": "test", "_id": 1, "_foo": "bar"}} {"field1": "value1"} -{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"index": {"_index": "test", "_id": 2}} {"field1": "value2"} diff --git a/server/src/test/resources/org/opensearch/action/search/simple-msearch1.json b/server/src/test/resources/org/opensearch/action/search/simple-msearch1.json index 4749745cf0cb3..a40bc64471f34 100644 --- a/server/src/test/resources/org/opensearch/action/search/simple-msearch1.json +++ b/server/src/test/resources/org/opensearch/action/search/simple-msearch1.json @@ -1,6 +1,6 @@ {"index":"test", "ignore_unavailable" : true, "expand_wildcards" : "open,closed"} {"query" : {"match_all" :{}}} -{"index" : "test", "type" : "type1", "expand_wildcards" : ["open", "closed"]} +{"index" : "test", "expand_wildcards" : ["open", "closed"]} {"query" : {"match_all" :{}}} {"index":"test", "ignore_unavailable" : false, "expand_wildcards" : ["open"]} {"query" : {"match_all" :{}}} diff --git a/server/src/test/resources/org/opensearch/action/search/simple-msearch2.json b/server/src/test/resources/org/opensearch/action/search/simple-msearch2.json index 79330d80f7267..ef82fee039638 100644 --- a/server/src/test/resources/org/opensearch/action/search/simple-msearch2.json +++ b/server/src/test/resources/org/opensearch/action/search/simple-msearch2.json @@ -1,6 +1,6 @@ {"index":"test"} {"query" : {"match_all" : {}}} -{"index" : "test", "type" : "type1"} +{"index" : "test"} {"query" : {"match_all" : {}}} {} {"query" : {"match_all" : {}}} diff --git a/server/src/test/resources/org/opensearch/action/search/simple-msearch3.json b/server/src/test/resources/org/opensearch/action/search/simple-msearch3.json index a6b52fd3bf93e..f7ff9a2b3f991 100644 --- a/server/src/test/resources/org/opensearch/action/search/simple-msearch3.json +++ 
b/server/src/test/resources/org/opensearch/action/search/simple-msearch3.json @@ -1,8 +1,8 @@ {"index":["test0", "test1"]} {"query" : {"match_all" : {}}} -{"index" : "test2,test3", "type" : "type1"} +{"index" : "test2,test3"} {"query" : {"match_all" : {}}} -{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ]} +{"index" : ["test4", "test1"]} {"query" : {"match_all" : {}}} {"search_type" : "dfs_query_then_fetch"} {"query" : {"match_all" : {}}} diff --git a/server/src/test/resources/org/opensearch/action/search/simple-msearch4.json b/server/src/test/resources/org/opensearch/action/search/simple-msearch4.json index 844d8bea1f8ee..4dd2cfde569dd 100644 --- a/server/src/test/resources/org/opensearch/action/search/simple-msearch4.json +++ b/server/src/test/resources/org/opensearch/action/search/simple-msearch4.json @@ -1,6 +1,6 @@ {"index":["test0", "test1"], "request_cache": true} {"query" : {"match_all" : {}}} -{"index" : "test2,test3", "type" : "type1", "preference": "_local"} +{"index" : "test2,test3", "preference": "_local"} {"query" : {"match_all" : {}}} -{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ], "routing": "123"} +{"index" : ["test4", "test1"], "routing": "123"} {"query" : {"match_all" : {}}} diff --git a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest1.json b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest1.json index fcb5e3a927ad9..1cfc14ce50a95 100644 --- a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest1.json +++ b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest1.json @@ -7,7 +7,6 @@ "offsets":false, "positions":false, "fields":["a","b","c"], - "_index": "testidx", - "_type":"test" + "_index": "testidx" } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest2.json b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest2.json index 
a0709effe709b..bb5dcc1b6abe7 100644 --- a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest2.json +++ b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest2.json @@ -8,8 +8,7 @@ "offsets": false, "positions": false, "fields":["a","b","c"], - "_index": "testidx", - "_type": "test" + "_index": "testidx" }, { "_id": "2", @@ -19,8 +18,7 @@ "offsets": false, "positions": false, "fields":["a","b","c"], - "_index": "testidx", - "_type": "test" + "_index": "testidx" } ] -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest3.json b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest3.json index 457f43cdc9aca..3157650aba974 100644 --- a/server/src/test/resources/org/opensearch/action/termvectors/multiRequest3.json +++ b/server/src/test/resources/org/opensearch/action/termvectors/multiRequest3.json @@ -2,7 +2,6 @@ "ids": ["1","2"], "parameters": { "_index": "testidx", - "_type": "test", "filter": { "max_num_terms": 20, "min_term_freq": 1, @@ -13,4 +12,4 @@ "max_word_length": 20 } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json index 70bf6dc7b5de0..557704b0bd4e3 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "dynamic_templates":[ { "template_1":{ @@ -11,4 +11,4 @@ } ] } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json index 
ad46106342639..8aa6d6ef8a613 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "dynamic_templates":[ { "template_1":{ diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json index 4b91bcfb36b5f..457fbdc668241 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "dynamic_templates":[ { "template_1":{ diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json index dbd74d33780d7..7828a4dbf587c 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping1.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json index b4d1843928891..0d6274dd50d3a 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping2.json @@ -1,5 +1,5 @@ { - "person" :{ + "_doc" :{ "properties" :{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json 
b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json index 459d9fc9b1eec..60a2751ede630 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping3.json @@ -1,5 +1,5 @@ { - "person" : { + "_doc" : { "properties" :{ "name" : { "type" : "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json index 416633c4fc106..fe3fb35fc7def 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-mapping4.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json index b00ea46b56d61..acffa3100539e 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade1.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json index 563567f463eff..8acb62e0a1f25 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade2.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git 
a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json index 5985ff316a772..c8552f41f8ca6 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/upgrade3.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name":{ "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json index b7317aba3c148..9f9c18a30f8e6 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-multi-fields.json @@ -1,5 +1,5 @@ { - "person": { + "_doc": { "properties": { "name": { "type": "text", diff --git a/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json index 8d7505624b1b8..e3a14f83b4743 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/path/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "properties":{ "name1":{ "type":"object", diff --git a/server/src/test/resources/org/opensearch/index/mapper/simple/test-mapping.json b/server/src/test/resources/org/opensearch/index/mapper/simple/test-mapping.json index 0f99af91ecb3a..55e462029ee6b 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/simple/test-mapping.json +++ b/server/src/test/resources/org/opensearch/index/mapper/simple/test-mapping.json @@ -1,5 +1,5 @@ { - "person":{ + "_doc":{ "_meta":{ "param1":"value1" }, diff --git a/server/src/test/resources/org/opensearch/search/geo/gzippedmap.gz 
b/server/src/test/resources/org/opensearch/search/geo/gzippedmap.gz index f8894811a6c08..77dd01fbf5eed 100644 Binary files a/server/src/test/resources/org/opensearch/search/geo/gzippedmap.gz and b/server/src/test/resources/org/opensearch/search/geo/gzippedmap.gz differ diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 2dfbb3e147205..d1040acd03aa7 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -38,9 +38,9 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" api "org.apache.logging.log4j:log4j-core:${versions.log4j}" api "io.netty:netty-all:${versions.netty}" - api 'com.google.code.gson:gson:2.8.9' + api 'com.google.code.gson:gson:2.9.0' api "org.bouncycastle:bcpkix-jdk15on:${versions.bouncycastle}" api "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" - api 'net.minidev:json-smart:2.4.7' + api 'net.minidev:json-smart:2.4.8' } diff --git a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationTestCase.java index 26a96971ef9b2..e806548cee088 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationTestCase.java @@ -80,10 +80,8 @@ public abstract class OpenSearchAllocationTestCase extends OpenSearchTestCase { ) { @Override public Long getShardSize(ShardRouting shardRouting) { - assert shardRouting.recoverySource() - .getType() == RecoverySource.Type.SNAPSHOT : "Expecting a recovery source of type [SNAPSHOT] but got [" - + shardRouting.recoverySource().getType() - + ']'; + assert shardRouting.recoverySource().getType() == RecoverySource.Type.SNAPSHOT + : "Expecting a recovery source of type [SNAPSHOT] but got [" + 
shardRouting.recoverySource().getType() + ']'; throw new UnsupportedOperationException(); } }; diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 7654606767e43..2bce5a7c81794 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -61,6 +61,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.junit.After; +import org.junit.Before; import org.opensearch.Version; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.support.replication.ReplicationResponse; @@ -74,6 +76,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; @@ -113,12 +116,10 @@ import org.opensearch.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.nio.charset.Charset; @@ -143,14 +144,14 @@ import static java.util.Collections.emptyList; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; 
-import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public abstract class EngineTestCase extends OpenSearchTestCase { @@ -174,10 +175,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase { // A default primary term is used by engine instances created in this test. protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L); - protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException { - assertVisibleCount(engine, numDocs, true); - } - protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException { if (refresh) { engine.refresh("test"); @@ -332,14 +329,14 @@ public void tearDown() throws Exception { try { if (engine != null && engine.isClosed.get() == false) { engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test")); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); assertNoInFlightDocuments(engine); assertMaxSeqNoInCommitUserData(engine); assertAtMostOneLuceneDocumentPerSequenceNumber(engine); } if (replicaEngine != null && replicaEngine.isClosed.get() == false) { replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - 
assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService("test")); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine); assertNoInFlightDocuments(replicaEngine); assertMaxSeqNoInCommitUserData(replicaEngine); assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine); @@ -411,21 +408,11 @@ protected static ParsedDocument testParsedDocument( } else { document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length)); } - return new ParsedDocument( - versionField, - seqID, - id, - "test", - routing, - Arrays.asList(document), - source, - XContentType.JSON, - mappingUpdate - ); + return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON, mappingUpdate); } public static CheckedBiFunction nestedParsedDocFactory() throws Exception { - final MapperService mapperService = createMapperService("type"); + final MapperService mapperService = createMapperService(); final String nestedMapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -449,7 +436,7 @@ public static CheckedBiFunction ne source.endObject(); } source.endObject(); - return nestedMapper.parse(new SourceToParse("test", "type", docId, BytesReference.bytes(source), XContentType.JSON)); + return nestedMapper.parse(new SourceToParse("test", docId, BytesReference.bytes(source), XContentType.JSON)); }; } @@ -459,7 +446,7 @@ public static CheckedBiFunction ne public static EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { return new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { + public ParsedDocument newDeleteTombstoneDoc(String id) { final ParseContext.Document doc = new ParseContext.Document(); Field uidField = new Field(IdFieldMapper.NAME, Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE); doc.add(uidField); @@ -475,7 +462,6 @@ public ParsedDocument newDeleteTombstoneDoc(String type, 
String id) { versionField, seqID, id, - type, null, Collections.singletonList(doc), new BytesArray("{}"), @@ -497,17 +483,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) { doc.add(versionField); BytesRef byteRef = new BytesRef(reason); doc.add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length)); - return new ParsedDocument( - versionField, - seqID, - null, - null, - null, - Collections.singletonList(doc), - null, - XContentType.JSON, - null - ); + return new ParsedDocument(versionField, seqID, null, null, Collections.singletonList(doc), null, XContentType.JSON, null); } }; } @@ -965,7 +941,7 @@ public static Term newUid(ParsedDocument doc) { } protected Engine.Get newGet(boolean realtime, ParsedDocument doc) { - return new Engine.Get(realtime, realtime, doc.type(), doc.id(), newUid(doc)); + return new Engine.Get(realtime, realtime, doc.id(), newUid(doc)); } protected Engine.Index indexForDoc(ParsedDocument doc) { @@ -990,7 +966,7 @@ protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long } protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { - return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); + return new Engine.Delete(id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); } protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { @@ -1055,7 +1031,6 @@ public static List generateSingleDocHistory( ); } else { op = new Engine.Delete( - "test", docId, id, forReplica && i >= startWithSeqNo ? 
i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -1114,7 +1089,6 @@ public List generateHistoryOnReplica( case DELETE: operations.add( new Engine.Delete( - doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, @@ -1336,9 +1310,9 @@ public static List getDocIds(Engine engine, boolean refresh * Reads all engine operations that have been processed by the engine from Lucene index. * The returned operations are sorted and de-duplicated, thus each sequence number will be have at most one operation. */ - public static List readAllOperationsInLucene(Engine engine, MapperService mapper) throws IOException { + public static List readAllOperationsInLucene(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapper, 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1350,13 +1324,9 @@ public static List readAllOperationsInLucene(Engine engine, /** * Reads all engine operations that have been processed by the engine from Lucene index/Translog based on source. 
*/ - public static List readAllOperationsBasedOnSource( - Engine engine, - Engine.HistorySource historySource, - MapperService mapper - ) throws IOException { + public static List readAllOperationsBasedOnSource(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", historySource, mapper, 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1368,8 +1338,8 @@ public static List readAllOperationsBasedOnSource( /** * Asserts the provided engine has a consistent document history between translog and Lucene index. */ - public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine, MapperService mapper) throws IOException { - if (mapper == null || mapper.documentMapper() == null || (engine instanceof InternalEngine) == false) { + public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine) throws IOException { + if (engine instanceof InternalEngine == false) { return; } final List translogOps = new ArrayList<>(); @@ -1379,7 +1349,7 @@ public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e translogOps.add(op); } } - final Map luceneOps = readAllOperationsInLucene(engine, mapper).stream() + final Map luceneOps = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); final long maxSeqNo = ((InternalEngine) engine).getLocalCheckpointTracker().getMaxSeqNo(); for (Translog.Operation op : translogOps) { @@ -1392,8 +1362,8 @@ public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations(); final long seqNoForRecovery; if 
(engine.config().getIndexSettings().isSoftDeleteEnabled()) { - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - seqNoForRecovery = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + seqNoForRecovery = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; } } else { seqNoForRecovery = engine.getMinRetainedSeqNo(); @@ -1481,7 +1451,7 @@ public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings } } - public static MapperService createMapperService(String type) throws IOException { + public static MapperService createMapperService() throws IOException { IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings( Settings.builder() @@ -1489,7 +1459,7 @@ public static MapperService createMapperService(String type) throws IOException .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) ) - .putMapping(type, "{\"properties\": {}}") + .putMapping("{\"properties\": {}}") .build(); MapperService mapperService = MapperTestUtils.newMapperService( new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), diff --git a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java index afb44caa64987..e1f2357aa2400 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java @@ -44,7 +44,6 @@ import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.DocumentMapperForType; import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.RootObjectMapper; import 
org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.seqno.SequenceNumbers; @@ -65,8 +64,6 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner { private final MapperService mapperService; - public Mapping mappingUpdate = null; - private final Map recoveredTypes = new HashMap<>(); private final AtomicLong appliedOperations = new AtomicLong(); @@ -95,21 +92,13 @@ public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings ind private DocumentMapperForType docMapper(String type) { RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(type); DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService); - return new DocumentMapperForType(b.build(mapperService), mappingUpdate); + return new DocumentMapperForType(b.build(mapperService), null); } private void applyOperation(Engine engine, Engine.Operation operation) throws IOException { switch (operation.operationType()) { case INDEX: - Engine.Index engineIndex = (Engine.Index) operation; - Mapping update = engineIndex.parsedDoc().dynamicMappingsUpdate(); - if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) { - recoveredTypes.compute( - engineIndex.type(), - (k, mapping) -> mapping == null ? 
update : mapping.merge(update, MapperService.MergeReason.MAPPING_RECOVERY) - ); - } - engine.index(engineIndex); + engine.index((Engine.Index) operation); break; case DELETE: engine.delete((Engine.Delete) operation); @@ -122,13 +111,6 @@ private void applyOperation(Engine engine, Engine.Operation operation) throws IO } } - /** - * Returns the recovered types modifying the mapping during the recovery - */ - public Map getRecoveredTypes() { - return recoveredTypes; - } - @Override public int run(Engine engine, Translog.Snapshot snapshot) throws IOException { int opsRecovered = 0; @@ -150,15 +132,8 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O final Translog.Index index = (Translog.Index) operation; final String indexName = mapperService.index().getName(); final Engine.Index engineIndex = IndexShard.prepareIndex( - docMapper(index.type()), - new SourceToParse( - indexName, - index.type(), - index.id(), - index.source(), - XContentHelper.xContentType(index.source()), - index.routing() - ), + docMapper(MapperService.SINGLE_MAPPING_NAME), + new SourceToParse(indexName, index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()), index.seqNo(), index.primaryTerm(), index.version(), @@ -172,20 +147,16 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O return engineIndex; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; - final Engine.Delete engineDelete = new Engine.Delete( - delete.type(), + return IndexShard.prepareDelete( delete.id(), - delete.uid(), delete.seqNo(), delete.primaryTerm(), delete.version(), versionType, origin, - System.nanoTime(), SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM ); - return engineDelete; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; final Engine.NoOp engineNoOp = new Engine.NoOp(noOp.seqNo(), noOp.primaryTerm(), origin, System.nanoTime(), noOp.reason()); diff --git 
a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java index a3ee32b7fab57..03ac664da1734 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java @@ -183,11 +183,11 @@ protected final SourceToParse source(CheckedConsumer { throw new UnsupportedOperationException("search lookup not available"); }, null) + new SearchLookup(mapperService, (ft, s) -> { throw new UnsupportedOperationException("search lookup not available"); }) ); return queryShardContext; } diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java index fc5c7283ed8b3..a64193e55836d 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java @@ -293,7 +293,7 @@ protected final List fetchFromDocValues(MapperService mapperService, MappedFi mapperService, iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field(ft.name(), sourceValue))).rootDoc()); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService, fieldDataLookup, null); + SearchLookup lookup = new SearchLookup(mapperService, fieldDataLookup); ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.doc().getForField(ft)); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java index ee92e3b134394..6c8e5a8c0a10f 100644 --- 
a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java @@ -85,6 +85,7 @@ import org.opensearch.index.engine.EngineConfigFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.engine.InternalEngineFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.GlobalCheckpointSyncAction; import org.opensearch.index.seqno.RetentionLease; import org.opensearch.index.seqno.RetentionLeaseSyncAction; @@ -108,7 +109,6 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Future; @@ -128,7 +128,7 @@ public abstract class OpenSearchIndexLevelReplicationTestCase extends IndexShard protected final Index index = new Index("test", "uuid"); private final ShardId shardId = new ShardId(index, 0); - protected final Map indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); + protected final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }"; protected ReplicationGroup createGroup(int replicas) throws IOException { return createGroup(replicas, Settings.EMPTY); @@ -143,11 +143,11 @@ protected IndexMetadata buildIndexMetadata(int replicas) throws IOException { return buildIndexMetadata(replicas, indexMapping); } - protected IndexMetadata buildIndexMetadata(int replicas, Map mappings) throws IOException { + protected IndexMetadata buildIndexMetadata(int replicas, String mappings) throws IOException { return buildIndexMetadata(replicas, Settings.EMPTY, mappings); } - protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, Map mappings) throws IOException { + protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, String 
mappings) throws IOException { Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, replicas) @@ -155,10 +155,11 @@ protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000)) .put(indexSettings) .build(); - IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()).settings(settings).primaryTerm(0, randomIntBetween(1, 100)); - for (Map.Entry typeMapping : mappings.entrySet()) { - metadata.putMapping(typeMapping.getKey(), typeMapping.getValue()); - } + IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()) + .settings(settings) + .putMapping(mappings) + .primaryTerm(0, randomIntBetween(1, 100)); + return metadata.build(); } @@ -246,7 +247,7 @@ protected EngineConfigFactory getEngineConfigFactory(IndexSettings indexSettings public int indexDocs(final int numOfDoc) throws Exception { for (int doc = 0; doc < numOfDoc; doc++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", Integer.toString(docId.incrementAndGet())) + final IndexRequest indexRequest = new IndexRequest(index.getName()).id(Integer.toString(docId.incrementAndGet())) .source("{}", XContentType.JSON); final BulkItemResponse response = index(indexRequest); if (response.isFailed()) { @@ -260,7 +261,7 @@ public int indexDocs(final int numOfDoc) throws Exception { public int appendDocs(final int numOfDoc) throws Exception { for (int doc = 0; doc < numOfDoc; doc++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); + final IndexRequest indexRequest = new IndexRequest(index.getName()).source("{}", XContentType.JSON); final BulkItemResponse response = index(indexRequest); if (response.isFailed()) { throw response.getFailure().getCause(); @@ -351,10 +352,8 @@ public IndexShard 
addReplica() throws IOException { } public synchronized void addReplica(IndexShard replica) throws IOException { - assert shardRoutings().stream() - .anyMatch(shardRouting -> shardRouting.isSameAllocation(replica.routingEntry())) == false : "replica with aId [" - + replica.routingEntry().allocationId() - + "] already exists"; + assert shardRoutings().stream().anyMatch(shardRouting -> shardRouting.isSameAllocation(replica.routingEntry())) == false + : "replica with aId [" + replica.routingEntry().allocationId() + "] already exists"; replicas.add(replica); if (replicationTargets != null) { replicationTargets.addReplica(replica); @@ -909,7 +908,7 @@ private void executeShardBulkOnPrimary( final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); primary.acquirePrimaryOperationPermit(permitAcquiredFuture, ThreadPool.Names.SAME, request); try (Releasable ignored = permitAcquiredFuture.actionGet()) { - MappingUpdatePerformer noopMappingUpdater = (update, shardId, type, listener1) -> {}; + MappingUpdatePerformer noopMappingUpdater = (update, shardId, listener1) -> {}; TransportShardBulkAction.performOnPrimary( request, primary, diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 54b3ffbfd3a1c..509edfd1b9103 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.opensearch.Version; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -52,6 +53,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; +import 
org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; @@ -113,10 +115,10 @@ import java.util.function.Consumer; import java.util.stream.Collectors; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; /** * A base class for unit tests that need to create and shutdown {@link IndexShard} instances easily, @@ -277,7 +279,7 @@ protected IndexShard newShard( IndexMetadata.Builder metadata = IndexMetadata.builder(shardRouting.getIndexName()) .settings(indexSettings) .primaryTerm(0, primaryTerm) - .putMapping("_doc", "{ \"properties\": {} }"); + .putMapping("{ \"properties\": {} }"); return newShard(shardRouting, metadata.build(), null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners); } @@ -866,7 +868,7 @@ public static void assertConsistentHistoryBetweenTranslogAndLucene(IndexShard sh } final Engine engine = shard.getEngineOrNull(); if (engine != null) { - EngineTestCase.assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, shard.mapperService()); + EngineTestCase.assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } @@ -875,25 +877,12 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id) } protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source) throws IOException { - return indexDoc(shard, type, id, source, XContentType.JSON, null); + return indexDoc(shard, id, source, XContentType.JSON, null); } - protected Engine.IndexResult indexDoc( - IndexShard shard, - String type, - String id, - String source, - XContentType xContentType, - String routing - ) throws IOException { - SourceToParse 
sourceToParse = new SourceToParse( - shard.shardId().getIndexName(), - type, - id, - new BytesArray(source), - xContentType, - routing - ); + protected Engine.IndexResult indexDoc(IndexShard shard, String id, String source, XContentType xContentType, String routing) + throws IOException { + SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), id, new BytesArray(source), xContentType, routing); Engine.IndexResult result; if (shard.routingEntry().primary()) { result = shard.applyIndexOperationOnPrimary( @@ -909,7 +898,7 @@ protected Engine.IndexResult indexDoc( updateMappings( shard, IndexMetadata.builder(shard.indexSettings().getIndexMetadata()) - .putMapping(type, result.getRequiredMappingUpdate().toString()) + .putMapping(result.getRequiredMappingUpdate().toString()) .build() ); result = shard.applyIndexOperationOnPrimary( @@ -954,12 +943,11 @@ protected void updateMappings(IndexShard shard, IndexMetadata indexMetadata) { ); } - protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException { + protected Engine.DeleteResult deleteDoc(IndexShard shard, String id) throws IOException { final Engine.DeleteResult result; if (shard.routingEntry().primary()) { result = shard.applyDeleteOperationOnPrimary( Versions.MATCH_ANY, - type, id, VersionType.INTERNAL, SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -970,7 +958,7 @@ protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id } else { final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates - result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, type, id); + result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, id); shard.sync(); // advance local checkpoint } return result; @@ -1030,13 +1018,13 @@ protected String snapshotShard(final IndexShard shard, final Snapshot snapshot, ); 
final PlainActionFuture future = PlainActionFuture.newFuture(); final String shardGen; - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(true)) { + try (GatedCloseable wrappedIndexCommit = shard.acquireLastIndexCommit(true)) { repository.snapshotShard( shard.store(), shard.mapperService(), snapshot.getSnapshotId(), indexId, - indexCommitRef.getIndexCommit(), + wrappedIndexCommit.get(), null, snapshotStatus, Version.CURRENT, diff --git a/test/framework/src/main/java/org/opensearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/opensearch/ingest/RandomDocumentPicks.java index 70b290c38ceba..5d55f098a1f82 100644 --- a/test/framework/src/main/java/org/opensearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/opensearch/ingest/RandomDocumentPicks.java @@ -149,7 +149,6 @@ public static IngestDocument randomIngestDocument(Random random) { */ public static IngestDocument randomIngestDocument(Random random, Map source) { String index = randomString(random); - String type = randomString(random); String id = randomString(random); String routing = null; Long version = randomNonNegtiveLong(random); @@ -160,7 +159,7 @@ public static IngestDocument randomIngestDocument(Random random, Map randomSource(Random random) { diff --git a/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java index e0aa8cad3fdf2..8ddaba8067b2d 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -110,9 +110,9 @@ public void testCreateSnapshot() { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - 
client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } client().admin().indices().prepareRefresh().get(); @@ -182,9 +182,9 @@ public void testCleanup() throws Exception { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client().prepareIndex("test-idx-1", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-2", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); - client().prepareIndex("test-idx-3", "doc", Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-1").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-2").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); + client().prepareIndex("test-idx-3").setId(Integer.toString(i)).setSource("foo", "bar" + i).get(); } client().admin().indices().prepareRefresh().get(); diff --git a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java index ac7dfcb709c5f..7d9810a11e143 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchBlobStoreRepositoryIntegTestCase.java @@ -326,7 +326,7 @@ public void testSnapshotAndRestore() throws Exception { logger.info("--> delete {} random documents from 
{}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(index, index, Integer.toString(doc)).get(); + client().prepareDelete(index, Integer.toString(doc)).get(); } client().admin().indices().prepareRefresh(index).get(); } @@ -379,7 +379,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { logger.info("--> delete {} random documents from {}", deleteCount, indexName); for (int j = 0; j < deleteCount; j++) { int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(indexName, indexName, Integer.toString(doc)).get(); + client().prepareDelete(indexName, Integer.toString(doc)).get(); } client().admin().indices().prepareRefresh(indexName).get(); } @@ -493,7 +493,8 @@ public void testIndicesDeletedFromRepository() throws Exception { protected void addRandomDocuments(String name, int numDocs) throws InterruptedException { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex(name, name, Integer.toString(i)) + indexRequestBuilders[i] = client().prepareIndex(name) + .setId(Integer.toString(i)) .setRouting(randomAlphaOfLength(randomIntBetween(1, 10))) .setSource("field", "value"); } diff --git a/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java index 1034fb4802c9a..ec8545e583d33 100644 --- a/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java @@ -109,9 +109,6 @@ public static SearchRequest randomSearchRequest(Supplier ra if (randomBoolean()) { searchRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); } - if (randomBoolean()) { - 
searchRequest.types(generateRandomStringArray(10, 10, false, false)); - } if (randomBoolean()) { searchRequest.preference(randomAlphaOfLengthBetween(3, 10)); } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index b08edf1028607..cbeefa7349e16 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -175,16 +175,16 @@ public abstract class AggregatorTestCase extends OpenSearchTestCase { protected ValuesSourceRegistry valuesSourceRegistry; // A list of field types that should not be tested, or are not currently supported - private static List TYPE_TEST_BLACKLIST; + private static List TYPE_TEST_DENYLIST; static { - List blacklist = new ArrayList<>(); - blacklist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects - blacklist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet) - blacklist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested - blacklist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion - blacklist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias - TYPE_TEST_BLACKLIST = blacklist; + List denylist = new ArrayList<>(); + denylist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects + denylist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet) + denylist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested + denylist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion + denylist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias + TYPE_TEST_DENYLIST = denylist; } /** @@ -713,11 +713,11 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy } /** - * A method that allows implementors to specifically blacklist 
particular field types (based on their content_name). + * A method that allows implementors to specifically denylist particular field types (based on their content_name). * This is needed in some areas where the ValuesSourceType is not granular enough, for example integer values * vs floating points, or `keyword` bytes vs `binary` bytes (which are not searchable) * - * This is a blacklist instead of a whitelist because there are vastly more field types than ValuesSourceTypes, + * This is a denylist instead of an allowlist because there are vastly more field types than ValuesSourceTypes, * and it's expected that these unsupported cases are exceptional rather than common */ protected List unsupportedMappedFieldTypes() { @@ -748,7 +748,7 @@ public void testSupportedFieldTypes() throws IOException { for (Map.Entry mappedType : mapperRegistry.getMapperParsers().entrySet()) { // Some field types should not be tested, or require more work and are not ready yet - if (TYPE_TEST_BLACKLIST.contains(mappedType.getKey())) { + if (TYPE_TEST_DENYLIST.contains(mappedType.getKey())) { continue; } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index 4113fa89487e2..495eb73e3f39a 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -53,7 +53,8 @@ public void setupSuiteScopeCluster() throws Exception { final int numDocs = 10; for (int i = 0; i < numDocs; i++) { // TODO randomize the size and the params in here? 
builders.add( - client().prepareIndex("idx", "type", String.valueOf(i)) + client().prepareIndex("idx") + .setId(String.valueOf(i)) .setSource( jsonBuilder().startObject() .field("value", i + 1) @@ -79,7 +80,8 @@ public void setupSuiteScopeCluster() throws Exception { builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( - client().prepareIndex("empty_bucket_idx", "type", String.valueOf(i)) + client().prepareIndex("empty_bucket_idx") + .setId(String.valueOf(i)) .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) ); } diff --git a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java index 3511a1d750738..3a55848c46150 100644 --- a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -444,7 +444,7 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce logger.info("--> indexing [{}] documents into [{}]", numdocs, index); IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(index, "_doc").setId(Integer.toString(i)).setSource("field1", "bar " + i); + builders[i] = client().prepareIndex(index).setId(Integer.toString(i)).setSource("field1", "bar " + i); } indexRandom(true, builders); flushAndRefresh(index); diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java index 5f585434a4e32..e5d14333de828 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java @@ -439,7 +439,6 @@ public void onRemoval(ShardId shardId, Accountable 
accountable) { new CompressedXContent( Strings.toString( PutMappingRequest.buildFromSimplifiedDef( - "_doc", TEXT_FIELD_NAME, "type=text", KEYWORD_FIELD_NAME, diff --git a/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java index 62fdd2d7314c0..f7271a5fe8c20 100644 --- a/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/opensearch/test/BackgroundIndexer.java @@ -170,10 +170,10 @@ public void run() { for (int i = 0; i < batchSize; i++) { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { - bulkRequest.add(client.prepareIndex(index, type).setSource(generateSource(id, threadRandom))); + bulkRequest.add(client.prepareIndex(index).setSource(generateSource(id, threadRandom))); } else { bulkRequest.add( - client.prepareIndex(index, type, Long.toString(id)).setSource(generateSource(id, threadRandom)) + client.prepareIndex(index).setId(Long.toString(id)).setSource(generateSource(id, threadRandom)) ); } } @@ -201,7 +201,7 @@ public void run() { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { try { - IndexResponse indexResponse = client.prepareIndex(index, type) + IndexResponse indexResponse = client.prepareIndex(index) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) .get(); @@ -214,7 +214,8 @@ public void run() { } } else { try { - IndexResponse indexResponse = client.prepareIndex(index, type, Long.toString(id)) + IndexResponse indexResponse = client.prepareIndex(index) + .setId(Long.toString(id)) .setTimeout(timeout) .setSource(generateSource(id, threadRandom)) .get(); diff --git a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java index 4342f789cb092..5ae441ed651b1 100644 --- a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java +++ 
b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java @@ -454,9 +454,8 @@ public InternalTestCluster( * It's only possible to change {@link #bootstrapMasterNodeIndex} value if autoManageMasterNodes is false. */ public void setBootstrapMasterNodeIndex(int bootstrapMasterNodeIndex) { - assert autoManageMasterNodes == false - || bootstrapMasterNodeIndex == -1 : "bootstrapMasterNodeIndex should be -1 if autoManageMasterNodes is true, but was " - + bootstrapMasterNodeIndex; + assert autoManageMasterNodes == false || bootstrapMasterNodeIndex == -1 + : "bootstrapMasterNodeIndex should be -1 if autoManageMasterNodes is true, but was " + bootstrapMasterNodeIndex; this.bootstrapMasterNodeIndex = bootstrapMasterNodeIndex; } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index bb5268122af42..b9ee655dcc979 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -872,13 +872,7 @@ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse se sb.append(value).append("] results. 
expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { - sb.append("\n-> _index: [") - .append(hit.getIndex()) - .append("] type [") - .append(hit.getType()) - .append("] id [") - .append(hit.getId()) - .append("]"); + sb.append("\n-> _index: [").append(hit.getIndex()).append("] id [").append(hit.getId()).append("]"); } logger.warn("{}", sb); fail(failMsg); @@ -1102,7 +1096,7 @@ protected void ensureClusterStateConsistency() throws IOException { // remove local node reference masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry); Map masterStateMap = convertToMap(masterClusterState); - int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length; + int masterClusterStateSize = masterClusterState.toString().length(); String masterId = masterClusterState.nodes().getMasterNodeId(); for (Client client : cluster().getClients()) { ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState(); @@ -1110,7 +1104,7 @@ protected void ensureClusterStateConsistency() throws IOException { // remove local node reference localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry); final Map localStateMap = convertToMap(localClusterState); - final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length; + final int localClusterStateSize = localClusterState.toString().length(); // Check that the non-master node has the same version of the cluster state as the master and // that the master node matches the master (otherwise there is no requirement for the cluster state to match) if (masterClusterState.version() == localClusterState.version() @@ -1118,7 +1112,10 @@ protected void ensureClusterStateConsistency() throws IOException { try { assertEquals("cluster state UUID does not match", 
masterClusterState.stateUUID(), localClusterState.stateUUID()); // We cannot compare serialization bytes since serialization order of maps is not guaranteed - // but we can compare serialization sizes - they should be the same + // We also cannot compare byte array size because CompressedXContent's DeflateCompressor uses + // a synced flush that can affect the size of the compressed byte array + // (see: DeflateCompressedXContentTests#testDifferentCompressedRepresentation for an example) + // instead we compare the string length of cluster state - they should be the same assertEquals("cluster state size does not match", masterClusterStateSize, localClusterStateSize); // Compare JSON serialization assertNull( @@ -1337,8 +1334,9 @@ protected void ensureFullyConnectedCluster() { * client().prepareIndex(index, type).setSource(source).execute().actionGet(); * */ + @Deprecated protected final IndexResponse index(String index, String type, XContentBuilder source) { - return client().prepareIndex(index, type).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setSource(source).execute().actionGet(); } /** @@ -1348,7 +1346,7 @@ protected final IndexResponse index(String index, String type, XContentBuilder s * */ protected final IndexResponse index(String index, String type, String id, Map source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1357,8 +1355,9 @@ protected final IndexResponse index(String index, String type, String id, Map */ + @Deprecated protected final IndexResponse index(String index, String type, String id, XContentBuilder source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1367,8 +1366,9 @@ protected final IndexResponse index(String index, 
String type, String id, XConte * return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); * */ + @Deprecated protected final IndexResponse index(String index, String type, String id, Object... source) { - return client().prepareIndex(index, type, id).setSource(source).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source).execute().actionGet(); } /** @@ -1379,8 +1379,9 @@ protected final IndexResponse index(String index, String type, String id, Object *

        * where source is a JSON String. */ + @Deprecated protected final IndexResponse index(String index, String type, String id, String source) { - return client().prepareIndex(index, type, id).setSource(source, XContentType.JSON).execute().actionGet(); + return client().prepareIndex(index).setId(id).setSource(source, XContentType.JSON).execute().actionGet(); } /** @@ -1537,10 +1538,9 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, List builders) throws InterruptedException { Random random = random(); - Map> indicesAndTypes = new HashMap<>(); + Set indices = new HashSet<>(); for (IndexRequestBuilder builder : builders) { - final Set types = indicesAndTypes.computeIfAbsent(builder.request().index(), index -> new HashSet<>()); - types.add(builder.request().type()); + indices.add(builder.request().index()); } Set> bogusIds = new HashSet<>(); // (index, type, id) if (random.nextBoolean() && !builders.isEmpty() && dummyDocuments) { @@ -1549,22 +1549,18 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma final int numBogusDocs = scaledRandomIntBetween(1, builders.size() * 2); final int unicodeLen = between(1, 10); for (int i = 0; i < numBogusDocs; i++) { - String id = "bogus_doc_" - + randomRealisticUnicodeOfLength(unicodeLen) - + Integer.toString(dummmyDocIdGenerator.incrementAndGet()); - Map.Entry> indexAndTypes = RandomPicks.randomFrom(random, indicesAndTypes.entrySet()); - String index = indexAndTypes.getKey(); - String type = RandomPicks.randomFrom(random, indexAndTypes.getValue()); - bogusIds.add(Arrays.asList(index, type, id)); + String id = "bogus_doc_" + randomRealisticUnicodeOfLength(unicodeLen) + dummmyDocIdGenerator.incrementAndGet(); + String index = RandomPicks.randomFrom(random, indices); + bogusIds.add(Arrays.asList(index, id)); // We configure a routing key in case the mapping requires it - builders.add(client().prepareIndex(index, type, id).setSource("{}", XContentType.JSON).setRouting(id)); + 
builders.add(client().prepareIndex().setIndex(index).setId(id).setSource("{}", XContentType.JSON).setRouting(id)); } } Collections.shuffle(builders, random()); final CopyOnWriteArrayList> errors = new CopyOnWriteArrayList<>(); List inFlightAsyncOperations = new ArrayList<>(); // If you are indexing just a few documents then frequently do it one at a time. If many then frequently in bulk. - final String[] indices = indicesAndTypes.keySet().toArray(new String[0]); + final String[] indicesArray = indices.toArray(new String[] {}); if (builders.size() < FREQUENT_BULK_THRESHOLD ? frequently() : builders.size() < ALWAYS_BULK_THRESHOLD ? rarely() : false) { if (frequently()) { logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), true, false); @@ -1572,13 +1568,13 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma indexRequestBuilder.execute( new PayloadLatchedActionListener<>(indexRequestBuilder, newLatch(inFlightAsyncOperations), errors) ); - postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush); + postIndexAsyncActions(indicesArray, inFlightAsyncOperations, maybeFlush); } } else { logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), false, false); for (IndexRequestBuilder indexRequestBuilder : builders) { indexRequestBuilder.execute().actionGet(); - postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush); + postIndexAsyncActions(indicesArray, inFlightAsyncOperations, maybeFlush); } } } else { @@ -1614,15 +1610,15 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma // delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs! 
for (List doc : bogusIds) { assertEquals( - "failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(2) + "]", + "failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(1) + "]", DocWriteResponse.Result.DELETED, - client().prepareDelete(doc.get(0), doc.get(1), doc.get(2)).setRouting(doc.get(2)).get().getResult() + client().prepareDelete(doc.get(0), doc.get(1)).setRouting(doc.get(1)).get().getResult() ); } } if (forceRefresh) { assertNoFailures( - client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).get() + client().admin().indices().prepareRefresh(indicesArray).setIndicesOptions(IndicesOptions.lenientExpandOpen()).get() ); } } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index 960400019f7ea..83e59e1edd8c8 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -304,7 +304,9 @@ protected IndexService createIndex(String index, Settings settings) { /** * Create a new index on the singleton node with the provided index settings. + * @deprecated types are being removed */ + @Deprecated protected IndexService createIndex(String index, Settings settings, String type, XContentBuilder mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null && mappings != null) { @@ -315,7 +317,9 @@ protected IndexService createIndex(String index, Settings settings, String type, /** * Create a new index on the singleton node with the provided index settings. + * @deprecated types are being removed */ + @Deprecated protected IndexService createIndex(String index, Settings settings, String type, Object... 
mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null) { diff --git a/test/framework/src/main/java/org/opensearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/opensearch/test/XContentTestUtils.java index 1a83a1d615de9..5945ac01b4547 100644 --- a/test/framework/src/main/java/org/opensearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/XContentTestUtils.java @@ -284,8 +284,8 @@ public static BytesReference insertRandomFields( * */ static List getInsertPaths(XContentParser parser, Stack currentPath) throws IOException { - assert parser.currentToken() == XContentParser.Token.START_OBJECT - || parser.currentToken() == XContentParser.Token.START_ARRAY : "should only be called when new objects or arrays start"; + assert parser.currentToken() == XContentParser.Token.START_OBJECT || parser.currentToken() == XContentParser.Token.START_ARRAY + : "should only be called when new objects or arrays start"; List validPaths = new ArrayList<>(); // parser.currentName() can be null for root object and unnamed objects in arrays if (parser.currentName() != null) { diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index e0e05bf103dbe..c37eb68a42836 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -269,14 +269,7 @@ public static void assertSearchHits(SearchResponse searchResponse, String... 
ids Set idsSet = new HashSet<>(Arrays.asList(ids)); for (SearchHit hit : searchResponse.getHits()) { assertThat( - "id [" - + hit.getId() - + "] was found in search results but wasn't expected (type [" - + hit.getType() - + "], index [" - + hit.getIndex() - + "])" - + shardStatus, + "id [" + hit.getId() + "] was found in search results but wasn't expected (index [" + hit.getIndex() + "])" + shardStatus, idsSet.remove(hit.getId()), equalTo(true) ); @@ -318,13 +311,7 @@ public static void assertHitCount(SearchResponse countResponse, long expectedHit } public static void assertExists(GetResponse response) { - String message = String.format( - Locale.ROOT, - "Expected %s/%s/%s to exist, but does not", - response.getIndex(), - response.getType(), - response.getId() - ); + String message = String.format(Locale.ROOT, "Expected %s/%s to exist, but does not", response.getIndex(), response.getId()); assertThat(message, response.isExists(), is(true)); } @@ -553,10 +540,6 @@ public static Matcher hasId(final String id) { return new OpenSearchMatchers.SearchHitHasIdMatcher(id); } - public static Matcher hasType(final String type) { - return new OpenSearchMatchers.SearchHitHasTypeMatcher(type); - } - public static Matcher hasIndex(final String index) { return new OpenSearchMatchers.SearchHitHasIndexMatcher(index); } diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java index 38f569f8a4526..5889b7e269ed2 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java @@ -65,29 +65,6 @@ public void describeTo(final Description description) { } } - public static class SearchHitHasTypeMatcher extends TypeSafeMatcher { - private String type; - - public SearchHitHasTypeMatcher(String type) { - this.type = type; - } - - @Override - public boolean 
matchesSafely(final SearchHit searchHit) { - return searchHit.getType().equals(type); - } - - @Override - public void describeMismatchSafely(final SearchHit searchHit, final Description mismatchDescription) { - mismatchDescription.appendText(" was ").appendValue(searchHit.getType()); - } - - @Override - public void describeTo(final Description description) { - description.appendText("searchHit type should be ").appendValue(type); - } - } - public static class SearchHitHasIndexMatcher extends TypeSafeMatcher { private String index; diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java index 9603b63337842..27369e79e5dee 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java @@ -968,10 +968,7 @@ protected static void createIndex(String name, Settings settings, String mapping entity += "}"; if (settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey()) - || settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey())) { - expectTranslogRetentionWarning(request); - } + } request.setJsonEntity(entity); client().performRequest(request); } @@ -1025,21 +1022,6 @@ protected static void expectSoftDeletesWarning(Request request, String indexName } } - protected static void expectTranslogRetentionWarning(Request request) { - final List expectedWarnings = Collections.singletonList( - "Translog retention settings [index.translog.retention.age] " - + "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version." 
- ); - final Builder requestOptions = RequestOptions.DEFAULT.toBuilder(); - if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } - } - protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java index 8a2e9deb424aa..15510e368b1f5 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java @@ -34,7 +34,7 @@ import java.util.regex.Pattern; /** - * Matches blacklist patterns. + * Matches denylist patterns. * * Currently the following syntax is supported: * @@ -45,20 +45,20 @@ * indices.get/10_basic/advanced/allow_no_indices (contains an additional segment) * * - * Each blacklist pattern is a suffix match on the path. Empty patterns are not allowed. + * Each denylist pattern is a suffix match on the path. Empty patterns are not allowed. */ final class BlacklistedPathPatternMatcher { private final Pattern pattern; /** - * Constructs a new BlacklistedPathPatternMatcher instance from the provided suffix pattern. + * Constructs a new DenylistedPathPatternMatcher instance from the provided suffix pattern. * * @param p The suffix pattern. Must be a non-empty string. 
*/ BlacklistedPathPatternMatcher(String p) { // guard against accidentally matching everything as an empty string lead to the pattern ".*" which matches everything if (p == null || p.trim().isEmpty()) { - throw new IllegalArgumentException("Empty blacklist patterns are not supported"); + throw new IllegalArgumentException("Empty denylist patterns are not supported"); } // very simple transformation from wildcard to a proper regex String finalPattern = p.replaceAll("\\*", "[^/]*") // support wildcard matches (within a single path segment) diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 9a611231a9fa0..4c3a1ec863d31 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -39,7 +39,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.client.NodeSelector; import org.opensearch.common.bytes.BytesReference; @@ -54,8 +53,6 @@ import java.util.List; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with opensearch. 
@@ -121,10 +118,6 @@ public ClientYamlTestResponse callApi( } } - if (esVersion().before(LegacyESVersion.V_7_0_0)) { - adaptRequestForOlderVersion(apiName, bodies, requestParams); - } - HttpEntity entity = createEntity(bodies, requestHeaders); try { response = callApiInternal(apiName, requestParams, entity, requestHeaders, nodeSelector); @@ -140,62 +133,6 @@ public ClientYamlTestResponse callApi( } } - /** - * To allow tests to run against a mixed 7.x/6.x cluster, we make certain modifications to the - * request related to types. - * - * Specifically, we generally use typeless index creation and document writes in test set-up code. - * This functionality is supported in 7.x, but is not supported in 6.x (or is not the default - * behavior). Here we modify the request so that it will work against a 6.x node. - */ - private void adaptRequestForOlderVersion(String apiName, List> bodies, Map requestParams) { - // For index creations, we specify 'include_type_name=false' if it is not explicitly set. This - // allows us to omit the parameter in the test description, while still being able to communicate - // with 6.x nodes where include_type_name defaults to 'true'. - if (apiName.equals("indices.create") && requestParams.containsKey(INCLUDE_TYPE_NAME_PARAMETER) == false) { - requestParams.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - } - - // We add the type to the document API requests if it's not already included. - if ((apiName.equals("index") || apiName.equals("update") || apiName.equals("delete") || apiName.equals("get")) - && requestParams.containsKey("type") == false) { - requestParams.put("type", "_doc"); - } - - // We also add the type to the bulk API requests if it's not already included. The type can either - // be on the request parameters or in the action metadata in the body of the request so we need to - // be sensitive to both scenarios. 
- if (apiName.equals("bulk") && requestParams.containsKey("type") == false) { - if (requestParams.containsKey("index")) { - requestParams.put("type", "_doc"); - } else { - for (int i = 0; i < bodies.size(); i++) { - Map body = bodies.get(i); - Map actionMetadata; - if (body.containsKey("index")) { - actionMetadata = (Map) body.get("index"); - i++; - } else if (body.containsKey("create")) { - actionMetadata = (Map) body.get("create"); - i++; - } else if (body.containsKey("update")) { - actionMetadata = (Map) body.get("update"); - i++; - } else if (body.containsKey("delete")) { - actionMetadata = (Map) body.get("delete"); - } else { - // action metadata is malformed so leave it malformed since - // the test is probably testing for malformed action metadata - continue; - } - if (actionMetadata.containsKey("_type") == false) { - actionMetadata.put("_type", "_doc"); - } - } - } - } - } - private HttpEntity createEntity(List> bodies, Map headers) throws IOException { if (bodies.isEmpty()) { return null; diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index 375103d2c1d0f..ca2659e9523e6 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -88,13 +88,13 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe */ public static final String REST_TESTS_SUITE = "tests.rest.suite"; /** - * Property that allows to blacklist some of the REST tests based on a comma separated list of globs + * Property that allows to denylist some of the REST tests based on a comma separated list of globs * e.g. 
"-Dtests.rest.blacklist=get/10_basic/*" */ public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist"; /** - * We use tests.rest.blacklist in build files to blacklist tests; this property enables a user to add additional blacklisted tests on - * top of the tests blacklisted in the build. + * We use tests.rest.blacklist in build files to denylist tests; this property enables a user to add additional denylisted tests on + * top of the tests denylisted in the build. */ public static final String REST_TESTS_BLACKLIST_ADDITIONS = "tests.rest.blacklist_additions"; /** @@ -116,7 +116,7 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe */ private static final String PATHS_SEPARATOR = "(? blacklistPathMatchers; + private static List denylistPathMatchers; private static ClientYamlTestExecutionContext restTestExecutionContext; private static ClientYamlTestExecutionContext adminExecutionContext; private static ClientYamlTestClient clientYamlTestClient; @@ -138,7 +138,7 @@ public static void initializeUseDefaultNumberOfShards() { public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { assert adminExecutionContext == null; - assert blacklistPathMatchers == null; + assert denylistPathMatchers == null; final ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH); validateSpec(restSpec); final List hosts = getClusterHosts(); @@ -149,21 +149,21 @@ public void initAndResetContext() throws Exception { clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion); restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType()); adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false); - final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); - blacklistPathMatchers = new ArrayList<>(); - for (final String entry : blacklist) { - 
blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + final String[] denylist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); + denylistPathMatchers = new ArrayList<>(); + for (final String entry : denylist) { + denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } - final String[] blacklistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null); - for (final String entry : blacklistAdditions) { - blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null); + for (final String entry : denylistAdditions) { + denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } } assert restTestExecutionContext != null; assert adminExecutionContext != null; - assert blacklistPathMatchers != null; + assert denylistPathMatchers != null; - // admin context must be available for @After always, regardless of whether the test was blacklisted + // admin context must be available for @After always, regardless of whether the test was denylisted adminExecutionContext.clear(); restTestExecutionContext.clear(); @@ -184,7 +184,7 @@ public static void closeClient() throws IOException { try { IOUtils.close(clientYamlTestClient); } finally { - blacklistPathMatchers = null; + denylistPathMatchers = null; restTestExecutionContext = null; adminExecutionContext = null; clientYamlTestClient = null; @@ -355,12 +355,12 @@ protected RequestOptions getCatNodesVersionMasterRequestOptions() { } public void test() throws IOException { - // skip test if it matches one of the blacklist globs - for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { + // skip test if it matches one of the denylist globs + for (BlacklistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) { String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); assumeFalse( "[" + 
testCandidate.getTestPath() + "] skipped, reason: blacklisted", - blacklistedPathMatcher.isSuffixMatch(testPath) + denylistedPathMatcher.isSuffixMatch(testPath) ); } diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java index b1d47d456bf8b..ec88cd0201db5 100644 --- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java @@ -2485,31 +2485,26 @@ public void testConcurrentDisconnectOnNonPublishedConnection() throws IOExceptio MockTransportService serviceC = buildService("TS_C", version0, Settings.EMPTY); CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); - serviceC.registerRequestHandler( - "internal:action", - ThreadPool.Names.SAME, - TestRequest::new, - (request, channel, task) -> { - // don't block on a network thread here - threadPool.generic().execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + serviceC.registerRequestHandler("internal:action", ThreadPool.Names.SAME, TestRequest::new, (request, channel, task) -> { + // don't block on a network thread here + threadPool.generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } + } - @Override - protected void doRun() throws Exception { - receivedLatch.countDown(); - sendResponseLatch.await(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - }); - } - ); + @Override + protected void doRun() throws Exception { + receivedLatch.countDown(); + sendResponseLatch.await(); + 
channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + }); + }); serviceC.start(); serviceC.acceptIncomingRequests(); CountDownLatch responseLatch = new CountDownLatch(1); @@ -2564,31 +2559,26 @@ public void testTransportStats() throws Exception { MockTransportService serviceC = buildService("TS_C", version0, Settings.EMPTY); CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); - serviceB.registerRequestHandler( - "internal:action", - ThreadPool.Names.SAME, - TestRequest::new, - (request, channel, task) -> { - // don't block on a network thread here - threadPool.generic().execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + serviceB.registerRequestHandler("internal:action", ThreadPool.Names.SAME, TestRequest::new, (request, channel, task) -> { + // don't block on a network thread here + threadPool.generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } + } - @Override - protected void doRun() throws Exception { - receivedLatch.countDown(); - sendResponseLatch.await(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - }); - } - ); + @Override + protected void doRun() throws Exception { + receivedLatch.countDown(); + sendResponseLatch.await(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + }); + }); serviceC.start(); serviceC.acceptIncomingRequests(); CountDownLatch responseLatch = new CountDownLatch(1); @@ -2688,31 +2678,26 @@ public void testTransportStatsWithException() throws Exception { CountDownLatch sendResponseLatch = new CountDownLatch(1); Exception ex = new RuntimeException("boom"); ex.setStackTrace(new StackTraceElement[0]); - serviceB.registerRequestHandler( - "internal:action", - 
ThreadPool.Names.SAME, - TestRequest::new, - (request, channel, task) -> { - // don't block on a network thread here - threadPool.generic().execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + serviceB.registerRequestHandler("internal:action", ThreadPool.Names.SAME, TestRequest::new, (request, channel, task) -> { + // don't block on a network thread here + threadPool.generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } + } - @Override - protected void doRun() throws Exception { - receivedLatch.countDown(); - sendResponseLatch.await(); - onFailure(ex); - } - }); - } - ); + @Override + protected void doRun() throws Exception { + receivedLatch.countDown(); + sendResponseLatch.await(); + onFailure(ex); + } + }); + }); serviceC.start(); serviceC.acceptIncomingRequests(); CountDownLatch responseLatch = new CountDownLatch(1); diff --git a/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java b/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java index ff4fa54769287..d79e1730e16f6 100644 --- a/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java +++ b/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java @@ -108,10 +108,6 @@ protected void assertTotalHits(int expectedTotalHits, Map response) { } protected static int extractTotalHits(Map response) { - if (isRunningAgainstOldCluster() && getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) { - return (Integer) XContentMapValues.extractValue("hits.total", response); - } else { - return (Integer) XContentMapValues.extractValue("hits.total.value", response); - } + return (Integer) 
XContentMapValues.extractValue("hits.total.value", response); } } diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index a80d8a115bca5..2cf271a0f601b 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -31,8 +31,8 @@ apply plugin: 'opensearch.java' dependencies { - api 'org.ow2.asm:asm:7.1' - api 'org.ow2.asm:asm-tree:7.1' + api 'org.ow2.asm:asm:9.2' + api 'org.ow2.asm:asm-tree:9.2' api 'org.ow2.asm:asm-analysis:7.1' api "org.apache.logging.log4j:log4j-api:${versions.log4j}" testImplementation project(":test:framework")