Merge branch 'opensearch-project:main' into wait_until
Rishikesh1159 authored Feb 28, 2023
2 parents 36f3851 + 950b86a commit c03fa8f
Showing 34 changed files with 935 additions and 218 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/dependabot_pr.yml
@@ -49,7 +49,7 @@ jobs:
           commit_options: '--signoff'
 
       - name: Update the changelog
-        uses: dangoslen/dependabot-changelog-helper@v2
+        uses: dangoslen/dependabot-changelog-helper@v3
         with:
           version: 'Unreleased 3.0'
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -34,6 +34,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `joni` from 2.1.44 to 2.1.45
 - Bump `commons-io:commons-io` from 2.7 to 2.11.0
 - Bump `org.jruby.joni:joni` from 2.1.45 to 2.1.48
+- Bump `org.apache.logging.log4j:log4j-core` from 2.19.0 to 2.20.0
+- Bump `com.azure:azure-storage-common` from 12.19.3 to 12.20.0
 
 ### Changed
 - [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948))
@@ -64,6 +66,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827))
 - Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944))
 - Support OpenSSL Provider with default Netty allocator ([#5460](https://github.com/opensearch-project/OpenSearch/pull/5460))
+- Added depth check in doc parser for deep nested document ([#5199](https://github.com/opensearch-project/OpenSearch/pull/5199))
 
 ### Security
 
@@ -87,4 +90,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 ### Security
 
 [Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD
-[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.5...2.x
\ No newline at end of file
+[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.5...2.x
2 changes: 1 addition & 1 deletion README.md
@@ -24,7 +24,7 @@
 
 ## Welcome!
 
-**OpenSearch** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of [Elasticsearch](https://en.wikipedia.org/wiki/Elasticsearch) and [Kibana](https://en.wikipedia.org/wiki/Kibana) following the [licence change](https://opensource.org/node/1099) in early 2021. We're looking to sustain (and evolve!) a search and analytics suite for the multitude of businesses who are dependent on the rights granted by the original, [Apache v2.0 License](LICENSE.txt).
+**OpenSearch** is [a community-driven, open source fork](https://aws.amazon.com/blogs/opensource/introducing-opensearch/) of [Elasticsearch](https://en.wikipedia.org/wiki/Elasticsearch) and [Kibana](https://en.wikipedia.org/wiki/Kibana) following the [license change](https://blog.opensource.org/the-sspl-is-not-an-open-source-license/) in early 2021. We're looking to sustain (and evolve!) a search and analytics suite for the multitude of businesses who are dependent on the rights granted by the original, [Apache v2.0 License](LICENSE.txt).
 
 ## Project Resources
 
@@ -16,7 +16,7 @@ repositories {
   mavenCentral()
 }
 dependencies {
-  implementation 'org.apache.logging.log4j:log4j-core:2.19.0'
+  implementation 'org.apache.logging.log4j:log4j-core:2.20.0'
 }
 
 ["0.0.1", "0.0.2"].forEach { v ->
2 changes: 1 addition & 1 deletion plugins/repository-azure/build.gradle
@@ -45,7 +45,7 @@ opensearchplugin {
 
 dependencies {
   api 'com.azure:azure-core:1.34.0'
-  api 'com.azure:azure-storage-common:12.19.3'
+  api 'com.azure:azure-storage-common:12.20.0'
   api 'com.azure:azure-core-http-netty:1.12.8'
   api "io.netty:netty-codec-dns:${versions.netty}"
   api "io.netty:netty-codec-socks:${versions.netty}"

This file was deleted.

@@ -0,0 +1 @@
+69b962bbeea787c6aca83115472791aacc2ae94c
@@ -424,14 +424,15 @@ public void testFileCacheStats() throws Exception {
         final Client client = client();
         final int numNodes = 2;
 
-        internalCluster().ensureAtLeastNumSearchNodes(numNodes);
+        internalCluster().ensureAtLeastNumDataNodes(numNodes);
         createIndexWithDocsAndEnsureGreen(1, 100, indexName1);
 
         createRepositoryWithSettings(null, repoName);
         takeSnapshot(client, snapshotName, repoName, indexName1);
         deleteIndicesAndEnsureGreen(client, indexName1);
         assertAllNodesFileCacheEmpty();
 
+        internalCluster().ensureAtLeastNumSearchNodes(numNodes);
         restoreSnapshotAndEnsureGreen(client, snapshotName, repoName);
         assertNodesFileCacheNonEmpty(numNodes);
     }
@@ -440,25 +441,52 @@ private void assertAllNodesFileCacheEmpty() {
         NodesStatsResponse response = client().admin().cluster().nodesStats(new NodesStatsRequest().all()).actionGet();
         for (NodeStats stats : response.getNodes()) {
             FileCacheStats fcstats = stats.getFileCacheStats();
-            assertNotNull(fcstats);
-            assertTrue(isFileCacheEmpty(fcstats));
+            if (fcstats != null) {
+                assertTrue(isFileCacheEmpty(fcstats));
+            }
         }
     }
 
     private void assertNodesFileCacheNonEmpty(int numNodes) {
         NodesStatsResponse response = client().admin().cluster().nodesStats(new NodesStatsRequest().all()).actionGet();
         int nonEmptyFileCacheNodes = 0;
         for (NodeStats stats : response.getNodes()) {
-            FileCacheStats fcstats = stats.getFileCacheStats();
-            assertNotNull(fcstats);
-            if (!isFileCacheEmpty(fcstats)) {
-                nonEmptyFileCacheNodes++;
+            FileCacheStats fcStats = stats.getFileCacheStats();
+            if (stats.getNode().isSearchNode()) {
+                if (!isFileCacheEmpty(fcStats)) {
+                    nonEmptyFileCacheNodes++;
+                }
+            } else {
+                assertNull(fcStats);
+            }
+
         }
         assertEquals(numNodes, nonEmptyFileCacheNodes);
     }
 
     private boolean isFileCacheEmpty(FileCacheStats stats) {
         return stats.getUsed().getBytes() == 0L && stats.getActive().getBytes() == 0L;
     }
 
+    public void testPruneFileCacheOnIndexDeletion() throws Exception {
+        final String snapshotName = "test-snap";
+        final String repoName = "test-repo";
+        final String indexName1 = "test-idx-1";
+        final String restoredIndexName1 = indexName1 + "-copy";
+        final Client client = client();
+        final int numNodes = 2;
+
+        internalCluster().ensureAtLeastNumSearchNodes(numNodes);
+        createIndexWithDocsAndEnsureGreen(1, 100, indexName1);
+
+        createRepositoryWithSettings(null, repoName);
+        takeSnapshot(client, snapshotName, repoName, indexName1);
+        deleteIndicesAndEnsureGreen(client, indexName1);
+
+        restoreSnapshotAndEnsureGreen(client, snapshotName, repoName);
+        assertNodesFileCacheNonEmpty(numNodes);
+
+        deleteIndicesAndEnsureGreen(client, restoredIndexName1);
+        assertAllNodesFileCacheEmpty();
+    }
 }
@@ -121,6 +121,10 @@ public static boolean isRemoteClusterClient(final Settings settings) {
         return hasRole(settings, DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE);
     }
 
+    public static boolean isSearchNode(Settings settings) {
+        return hasRole(settings, DiscoveryNodeRole.SEARCH_ROLE);
+    }
+
     private final String nodeName;
     private final String nodeId;
     private final String ephemeralId;
@@ -35,6 +35,7 @@
 import org.opensearch.cluster.routing.allocation.AwarenessReplicaBalance;
 import org.opensearch.action.search.CreatePitController;
 import org.opensearch.cluster.routing.allocation.decider.NodeLoadAwareAllocationDecider;
+import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.IndexModule;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.IndexingPressure;
@@ -152,6 +153,7 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.function.Predicate;
 
@@ -629,4 +631,14 @@ public void apply(Settings value, Settings current, Settings previous) {
 
     public static List<SettingUpgrader<?>> BUILT_IN_SETTING_UPGRADERS = Collections.emptyList();
 
+    /**
+     * Map of feature flag name to feature-flagged cluster settings. Once each feature
+     * is ready for production release, the feature flag can be removed, and the
+     * setting should be moved to {@link #BUILT_IN_CLUSTER_SETTINGS}.
+     */
+    public static final Map<String, List<Setting>> FEATURE_FLAGGED_CLUSTER_SETTINGS = Map.of(
+        FeatureFlags.SEARCHABLE_SNAPSHOT,
+        List.of(Node.NODE_SEARCH_CACHE_SIZE_SETTING)
+    );
+
 }
@@ -91,9 +91,15 @@ public SettingsModule(
             registerSetting(setting);
         }
 
+        for (Map.Entry<String, List<Setting>> featureFlaggedSetting : ClusterSettings.FEATURE_FLAGGED_CLUSTER_SETTINGS.entrySet()) {
+            if (FeatureFlags.isEnabled(featureFlaggedSetting.getKey())) {
+                featureFlaggedSetting.getValue().forEach(this::registerSetting);
+            }
+        }
+
         for (Map.Entry<String, List<Setting>> featureFlaggedSetting : IndexScopedSettings.FEATURE_FLAGGED_INDEX_SETTINGS.entrySet()) {
             if (FeatureFlags.isEnabled(featureFlaggedSetting.getKey())) {
-                featureFlaggedSetting.getValue().forEach(feature -> registerSetting(feature));
+                featureFlaggedSetting.getValue().forEach(this::registerSetting);
             }
         }
 
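For context, the two hunks above (ClusterSettings and SettingsModule) wire feature-flagged cluster settings into registration: settings listed under a flag in FEATURE_FLAGGED_CLUSTER_SETTINGS are only registered when FeatureFlags.isEnabled(...) reports that flag as enabled. The sketch below shows that pattern in isolation; it is not the OpenSearch implementation. The flag name and setting key are the values SEARCHABLE_SNAPSHOT and NODE_SEARCH_CACHE_SIZE_SETTING are assumed to carry, and the String-based map plus the system-property isEnabled helper are simplified stand-ins.

import java.util.List;
import java.util.Map;

// Minimal stand-alone sketch of feature-flag gated setting registration.
public class FeatureFlagGatingSketch {

    // Stand-in for FeatureFlags.isEnabled(...); assumed to read a boolean system property.
    static boolean isEnabled(String featureFlag) {
        return Boolean.parseBoolean(System.getProperty(featureFlag, "false"));
    }

    public static void main(String[] args) {
        // Flag and setting names assumed from the diff; everything else is illustrative.
        Map<String, List<String>> featureFlaggedClusterSettings = Map.of(
            "opensearch.experimental.feature.searchable_snapshot.enabled",
            List.of("node.search.cache.size")
        );

        for (Map.Entry<String, List<String>> entry : featureFlaggedClusterSettings.entrySet()) {
            if (isEnabled(entry.getKey())) {
                // In SettingsModule this is where registerSetting(...) is invoked.
                entry.getValue().forEach(name -> System.out.println("registered: " + name));
            } else {
                entry.getValue().forEach(name -> System.out.println("skipped (flag off): " + name));
            }
        }
    }
}

Run with -Dopensearch.experimental.feature.searchable_snapshot.enabled=true to see the setting picked up. Keeping experimental settings out of the registered set until the flag is enabled means clusters that have not opted in never accept them, which is why the change can introduce NODE_SEARCH_CACHE_SIZE_SETTING without affecting default deployments.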