Skip to content

Commit b68c8df

Browse files
committed
Merge branch 'trunk' into HADOOP-18646
# Conflicts:
#   LICENSE-binary
2 parents 7aa77e8 + 358bf80 commit b68c8df

File tree

66 files changed

+2343
-192
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

66 files changed

+2343
-192
lines changed

LICENSE-binary

Lines changed: 38 additions & 33 deletions
Original file line number | Diff line number | Diff line change
@@ -210,9 +210,9 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js
210210
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
211211
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/TimeoutFuture.java
212212

213-
com.aliyun:aliyun-java-sdk-core:3.4.0
214-
com.aliyun:aliyun-java-sdk-ecs:4.2.0
215-
com.aliyun:aliyun-java-sdk-ram:3.0.0
213+
com.aliyun:aliyun-java-sdk-core:4.5.10
214+
com.aliyun:aliyun-java-sdk-kms:2.11.0
215+
com.aliyun:aliyun-java-sdk-ram:3.1.0
216216
com.aliyun:aliyun-java-sdk-sts:3.0.0
217217
com.aliyun.oss:aliyun-sdk-oss:3.13.2
218218
com.amazonaws:aws-java-sdk-bundle:1.12.316
@@ -259,36 +259,36 @@ io.grpc:grpc-netty:1.26.0
259259
io.grpc:grpc-protobuf:1.26.0
260260
io.grpc:grpc-protobuf-lite:1.26.0
261261
io.grpc:grpc-stub:1.26.0
262-
io.netty:netty-all:4.1.89.Final
263-
io.netty:netty-buffer:4.1.89.Final
264-
io.netty:netty-codec:4.1.89.Final
265-
io.netty:netty-codec-dns:4.1.89.Final
266-
io.netty:netty-codec-haproxy:4.1.89.Final
267-
io.netty:netty-codec-http:4.1.89.Final
268-
io.netty:netty-codec-http2:4.1.89.Final
269-
io.netty:netty-codec-memcache:4.1.89.Final
270-
io.netty:netty-codec-mqtt:4.1.89.Final
271-
io.netty:netty-codec-redis:4.1.89.Final
272-
io.netty:netty-codec-smtp:4.1.89.Final
273-
io.netty:netty-codec-socks:4.1.89.Final
274-
io.netty:netty-codec-stomp:4.1.89.Final
275-
io.netty:netty-codec-xml:4.1.89.Final
276-
io.netty:netty-common:4.1.89.Final
277-
io.netty:netty-handler:4.1.89.Final
278-
io.netty:netty-handler-proxy:4.1.89.Final
279-
io.netty:netty-resolver:4.1.89.Final
280-
io.netty:netty-resolver-dns:4.1.89.Final
281-
io.netty:netty-transport:4.1.89.Final
282-
io.netty:netty-transport-rxtx:4.1.89.Final
283-
io.netty:netty-transport-sctp:4.1.89.Final
284-
io.netty:netty-transport-udt:4.1.89.Final
285-
io.netty:netty-transport-classes-epoll:4.1.89.Final
286-
io.netty:netty-transport-native-unix-common:4.1.89.Final
287-
io.netty:netty-transport-classes-kqueue:4.1.89.Final
288-
io.netty:netty-resolver-dns-classes-macos:4.1.89.Final
289-
io.netty:netty-transport-native-epoll:4.1.89.Final
290-
io.netty:netty-transport-native-kqueue:4.1.89.Final
291-
io.netty:netty-resolver-dns-native-macos:4.1.89.Final
262+
io.netty:netty-all:4.1.77.Final
263+
io.netty:netty-buffer:4.1.77.Final
264+
io.netty:netty-codec:4.1.77.Final
265+
io.netty:netty-codec-dns:4.1.77.Final
266+
io.netty:netty-codec-haproxy:4.1.77.Final
267+
io.netty:netty-codec-http:4.1.77.Final
268+
io.netty:netty-codec-http2:4.1.77.Final
269+
io.netty:netty-codec-memcache:4.1.77.Final
270+
io.netty:netty-codec-mqtt:4.1.77.Final
271+
io.netty:netty-codec-redis:4.1.77.Final
272+
io.netty:netty-codec-smtp:4.1.77.Final
273+
io.netty:netty-codec-socks:4.1.77.Final
274+
io.netty:netty-codec-stomp:4.1.77.Final
275+
io.netty:netty-codec-xml:4.1.77.Final
276+
io.netty:netty-common:4.1.77.Final
277+
io.netty:netty-handler:4.1.77.Final
278+
io.netty:netty-handler-proxy:4.1.77.Final
279+
io.netty:netty-resolver:4.1.77.Final
280+
io.netty:netty-resolver-dns:4.1.77.Final
281+
io.netty:netty-transport:4.1.77.Final
282+
io.netty:netty-transport-rxtx:4.1.77.Final
283+
io.netty:netty-transport-sctp:4.1.77.Final
284+
io.netty:netty-transport-udt:4.1.77.Final
285+
io.netty:netty-transport-classes-epoll:4.1.77.Final
286+
io.netty:netty-transport-native-unix-common:4.1.77.Final
287+
io.netty:netty-transport-classes-kqueue:4.1.77.Final
288+
io.netty:netty-resolver-dns-classes-macos:4.1.77.Final
289+
io.netty:netty-transport-native-epoll:4.1.77.Final
290+
io.netty:netty-transport-native-kqueue:4.1.77.Final
291+
io.netty:netty-resolver-dns-native-macos:4.1.77.Final
292292
io.opencensus:opencensus-api:0.12.3
293293
io.opencensus:opencensus-contrib-grpc-metrics:0.12.3
294294
io.reactivex:rxjava:1.3.8
@@ -357,6 +357,9 @@ org.eclipse.jetty:jetty-xml:9.4.48.v20220622
357357
org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.48.v20220622
358358
org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.48.v20220622
359359
org.ehcache:ehcache:3.3.1
360+
org.ini4j:ini4j:0.5.4
361+
org.jetbrains.kotlin:kotlin-stdlib:1.4.10
362+
org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
360363
org.lz4:lz4-java:1.7.1
361364
org.objenesis:objenesis:2.6
362365
org.xerial.snappy:snappy-java:1.0.5
@@ -516,6 +519,8 @@ Eclipse Public License 1.0
516519
--------------------------
517520

518521
junit:junit:4.13.2
522+
org.jacoco:org.jacoco.agent:0.8.5
523+
519524

520525

521526
HSQL License

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -314,7 +314,8 @@ public static void login(final Configuration conf,
314314

315315
String keytabFilename = conf.get(keytabFileKey);
316316
if (keytabFilename == null || keytabFilename.length() == 0) {
317-
throw new IOException("Running in secure mode, but config doesn't have a keytab");
317+
throw new IOException(
318+
"Running in secure mode, but config doesn't have a keytab for key: " + keytabFileKey);
318319
}
319320

320321
String principalConfig = conf.get(userNameKey, System

hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -370,6 +370,9 @@ Each metrics record contains tags such as SessionId and Hostname as additional i
370370
|:---- |:---- |
371371
| `BytesWritten` | Total number of bytes written to DataNode |
372372
| `BytesRead` | Total number of bytes read from DataNode |
373+
| `ReadTransferRateNumOps` | Total number of data read transfers |
374+
| `ReadTransferRateAvgTime` | Average transfer rate of bytes read from DataNode, measured in bytes per second. |
375+
| `ReadTransferRate`*num*`s(50/75/90/95/99)thPercentileRate` | The 50/75/90/95/99th percentile of the transfer rate of bytes read from DataNode, measured in bytes per second. |
373376
| `BlocksWritten` | Total number of blocks written to DataNode |
374377
| `BlocksRead` | Total number of blocks read from DataNode |
375378
| `BlocksReplicated` | Total number of blocks replicated |

hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java

Lines changed: 13 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@
2020
import org.apache.hadoop.classification.InterfaceAudience;
2121
import org.apache.hadoop.conf.Configuration;
2222
import org.apache.hadoop.fs.Path;
23-
import org.apache.log4j.PropertyConfigurator;
23+
2424
import org.slf4j.Logger;
2525
import org.slf4j.LoggerFactory;
2626

@@ -104,8 +104,6 @@ public class KMSConfiguration {
104104

105105
public static final boolean KEY_AUTHORIZATION_ENABLE_DEFAULT = true;
106106

107-
private static final String LOG4J_PROPERTIES = "kms-log4j.properties";
108-
109107
static {
110108
Configuration.addDefaultResource(KMS_DEFAULT_XML);
111109
Configuration.addDefaultResource(KMS_SITE_XML);
@@ -163,31 +161,20 @@ public static boolean isACLsFileNewer(long time) {
163161
return newer;
164162
}
165163

166-
public static void initLogging() {
167-
String confDir = System.getProperty(KMS_CONFIG_DIR);
168-
if (confDir == null) {
169-
throw new RuntimeException("System property '" +
170-
KMSConfiguration.KMS_CONFIG_DIR + "' not defined");
164+
/**
165+
* Validate whether "kms.config.dir" and "log4j.configuration" are defined in the System
166+
* properties. If not, abort the KMS WebServer.
167+
*/
168+
public static void validateSystemProps() {
169+
if (System.getProperty(KMS_CONFIG_DIR) == null) {
170+
String errorMsg = "System property '" + KMS_CONFIG_DIR + "' not defined";
171+
System.err.println("Aborting KMSWebServer because " + errorMsg);
172+
throw new RuntimeException(errorMsg);
171173
}
172174
if (System.getProperty("log4j.configuration") == null) {
173-
System.setProperty("log4j.defaultInitOverride", "true");
174-
boolean fromClasspath = true;
175-
File log4jConf = new File(confDir, LOG4J_PROPERTIES).getAbsoluteFile();
176-
if (log4jConf.exists()) {
177-
PropertyConfigurator.configureAndWatch(log4jConf.getPath(), 1000);
178-
fromClasspath = false;
179-
} else {
180-
ClassLoader cl = Thread.currentThread().getContextClassLoader();
181-
URL log4jUrl = cl.getResource(LOG4J_PROPERTIES);
182-
if (log4jUrl != null) {
183-
PropertyConfigurator.configure(log4jUrl);
184-
}
185-
}
186-
LOG.debug("KMS log starting");
187-
if (fromClasspath) {
188-
LOG.warn("Log4j configuration file '{}' not found", LOG4J_PROPERTIES);
189-
LOG.warn("Logging with INFO level to standard output");
190-
}
175+
String errorMsg = "System property 'log4j.configuration' not defined";
176+
System.err.println("Aborting KMSWebServer because " + errorMsg);
177+
throw new RuntimeException(errorMsg);
191178
}
192179
}
193180
}

hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -185,7 +185,7 @@ public URL getKMSUrl() {
185185
}
186186

187187
public static void main(String[] args) throws Exception {
188-
KMSConfiguration.initLogging();
188+
KMSConfiguration.validateSystemProps();
189189
StringUtils.startupShutdownMessage(KMSWebServer.class, args, LOG);
190190
Configuration conf = KMSConfiguration.getKMSConf();
191191
Configuration sslConf = SSLFactory.readSSLConfiguration(conf, SSLFactory.Mode.SERVER);

hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -49,6 +49,8 @@ function hadoop_subcommand_kms
4949
"-Dkms.config.dir=${HADOOP_CONF_DIR}"
5050
hadoop_add_param HADOOP_OPTS "-Dkms.log.dir=" \
5151
"-Dkms.log.dir=${HADOOP_LOG_DIR}"
52+
hadoop_add_param HADOOP_OPTS "-Dlog4j.configuration=" \
53+
"-Dlog4j.configuration=file:${HADOOP_CONF_DIR}/kms-log4j.properties"
5254

5355
if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
5456
[[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java

Lines changed: 29 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -224,7 +224,7 @@ boolean deadNodesContain(DatanodeInfo nodeInfo) {
224224
}
225225

226226
/**
227-
* Grab the open-file info from namenode
227+
* Grab the open-file info from namenode.
228228
* @param refreshLocatedBlocks whether to re-fetch locatedblocks
229229
*/
230230
void openInfo(boolean refreshLocatedBlocks) throws IOException {
@@ -940,7 +940,8 @@ private DNAddrPair chooseDataNode(LocatedBlock block,
940940
* @return Returns chosen DNAddrPair; Can be null if refetchIfRequired is
941941
* false.
942942
*/
943-
private DNAddrPair chooseDataNode(LocatedBlock block,
943+
@VisibleForTesting
944+
DNAddrPair chooseDataNode(LocatedBlock block,
944945
Collection<DatanodeInfo> ignoredNodes, boolean refetchIfRequired)
945946
throws IOException {
946947
while (true) {
@@ -955,6 +956,14 @@ private DNAddrPair chooseDataNode(LocatedBlock block,
955956
}
956957
}
957958

959+
/**
960+
* RefetchLocations should only be called when there are no active requests
961+
* to datanodes. In the hedged read case this means futures should be empty.
962+
* @param block The locatedBlock to get new datanode locations for.
963+
* @param ignoredNodes A list of ignored nodes. This list can be null and can be cleared.
964+
* @return the locatedBlock with updated datanode locations.
965+
* @throws IOException
966+
*/
958967
private LocatedBlock refetchLocations(LocatedBlock block,
959968
Collection<DatanodeInfo> ignoredNodes) throws IOException {
960969
String errMsg = getBestNodeDNAddrPairErrorString(block.getLocations(),
@@ -999,13 +1008,24 @@ private LocatedBlock refetchLocations(LocatedBlock block,
9991008
throw new InterruptedIOException(
10001009
"Interrupted while choosing DataNode for read.");
10011010
}
1002-
clearLocalDeadNodes(); //2nd option is to remove only nodes[blockId]
1011+
clearCachedNodeState(ignoredNodes);
10031012
openInfo(true);
10041013
block = refreshLocatedBlock(block);
10051014
failures++;
10061015
return block;
10071016
}
10081017

1018+
/**
1019+
* Clear both the dead nodes and the ignored nodes
1020+
* @param ignoredNodes is cleared
1021+
*/
1022+
private void clearCachedNodeState(Collection<DatanodeInfo> ignoredNodes) {
1023+
clearLocalDeadNodes(); //2nd option is to remove only nodes[blockId]
1024+
if (ignoredNodes != null) {
1025+
ignoredNodes.clear();
1026+
}
1027+
}
1028+
10091029
/**
10101030
* Get the best node from which to stream the data.
10111031
* @param block LocatedBlock, containing nodes in priority order.
@@ -1337,8 +1357,12 @@ private void hedgedFetchBlockByteRange(LocatedBlock block, long start,
13371357
} catch (InterruptedException ie) {
13381358
// Ignore and retry
13391359
}
1340-
if (refetch) {
1341-
refetchLocations(block, ignored);
1360+
// If refetch is true, then all nodes are in deadNodes or ignoredNodes.
1361+
// We should loop through all futures and remove them, so we do not
1362+
// have concurrent requests to the same node.
1363+
// Once all futures are cleared, we can clear the ignoredNodes and retry.
1364+
if (refetch && futures.isEmpty()) {
1365+
block = refetchLocations(block, ignored);
13421366
}
13431367
// We got here if exception. Ignore this node on next go around IFF
13441368
// we found a chosenNode to hedge read against.

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -201,6 +201,10 @@ public class RBFConfigKeys extends CommonConfigurationKeysPublic {
201201
FEDERATION_ROUTER_PREFIX + "observer.federated.state.propagation.maxsize";
202202
public static final int DFS_ROUTER_OBSERVER_FEDERATED_STATE_PROPAGATION_MAXSIZE_DEFAULT = 5;
203203

204+
public static final String DFS_ROUTER_OBSERVER_STATE_ID_REFRESH_PERIOD_KEY =
205+
FEDERATION_ROUTER_PREFIX + "observer.state.id.refresh.period";
206+
public static final String DFS_ROUTER_OBSERVER_STATE_ID_REFRESH_PERIOD_DEFAULT = "15s";
207+
204208
public static final String FEDERATION_STORE_SERIALIZER_CLASS =
205209
FEDERATION_STORE_PREFIX + "serializer";
206210
public static final Class<StateStoreSerializerPBImpl>

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcClient.java

Lines changed: 52 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -57,6 +57,7 @@
5757
import java.util.concurrent.ThreadFactory;
5858
import java.util.concurrent.ThreadPoolExecutor;
5959
import java.util.concurrent.TimeUnit;
60+
import java.util.concurrent.atomic.LongAccumulator;
6061
import java.util.concurrent.atomic.LongAdder;
6162
import java.util.regex.Matcher;
6263
import java.util.regex.Pattern;
@@ -86,6 +87,7 @@
8687
import org.apache.hadoop.net.NetUtils;
8788
import org.apache.hadoop.security.UserGroupInformation;
8889
import org.apache.hadoop.util.StringUtils;
90+
import org.apache.hadoop.util.Time;
8991
import org.eclipse.jetty.util.ajax.JSON;
9092
import org.slf4j.Logger;
9193
import org.slf4j.LoggerFactory;
@@ -136,6 +138,14 @@ public class RouterRpcClient {
136138
private final boolean observerReadEnabledDefault;
137139
/** Nameservice specific overrides of the default setting for enabling observer reads. */
138140
private HashSet<String> observerReadEnabledOverrides = new HashSet<>();
141+
/**
142+
* Period to refresh namespace stateID using active namenode.
143+
* This ensures the namespace stateID is fresh even when an
144+
* observer is trailing behind.
145+
*/
146+
private long activeNNStateIdRefreshPeriodMs;
147+
/** Last msync times for each namespace. */
148+
private final ConcurrentHashMap<String, LongAccumulator> lastActiveNNRefreshTimes;
139149

140150
/** Pattern to parse a stack trace line. */
141151
private static final Pattern STACK_TRACE_PATTERN =
@@ -211,13 +221,25 @@ public RouterRpcClient(Configuration conf, Router router,
211221
this.observerReadEnabledDefault = conf.getBoolean(
212222
RBFConfigKeys.DFS_ROUTER_OBSERVER_READ_DEFAULT_KEY,
213223
RBFConfigKeys.DFS_ROUTER_OBSERVER_READ_DEFAULT_VALUE);
214-
String[] observerReadOverrides = conf.getStrings(RBFConfigKeys.DFS_ROUTER_OBSERVER_READ_OVERRIDES);
224+
String[] observerReadOverrides =
225+
conf.getStrings(RBFConfigKeys.DFS_ROUTER_OBSERVER_READ_OVERRIDES);
215226
if (observerReadOverrides != null) {
216227
observerReadEnabledOverrides.addAll(Arrays.asList(observerReadOverrides));
217228
}
218229
if (this.observerReadEnabledDefault) {
219230
LOG.info("Observer read is enabled for router.");
220231
}
232+
this.activeNNStateIdRefreshPeriodMs = conf.getTimeDuration(
233+
RBFConfigKeys.DFS_ROUTER_OBSERVER_STATE_ID_REFRESH_PERIOD_KEY,
234+
RBFConfigKeys.DFS_ROUTER_OBSERVER_STATE_ID_REFRESH_PERIOD_DEFAULT,
235+
TimeUnit.SECONDS, TimeUnit.MILLISECONDS);
236+
if (activeNNStateIdRefreshPeriodMs < 0) {
237+
LOG.info("Periodic stateId freshness check is disabled"
238+
+ " since '{}' is {}ms, which is less than 0.",
239+
RBFConfigKeys.DFS_ROUTER_OBSERVER_STATE_ID_REFRESH_PERIOD_KEY,
240+
activeNNStateIdRefreshPeriodMs);
241+
}
242+
this.lastActiveNNRefreshTimes = new ConcurrentHashMap<>();
221243
}
222244

223245
/**
@@ -1707,10 +1729,13 @@ private List<? extends FederationNamenodeContext> getOrderedNamenodes(String nsI
17071729
boolean isObserverRead) throws IOException {
17081730
final List<? extends FederationNamenodeContext> namenodes;
17091731

1710-
if (RouterStateIdContext.getClientStateIdFromCurrentCall(nsId) > Long.MIN_VALUE) {
1711-
namenodes = namenodeResolver.getNamenodesForNameserviceId(nsId, isObserverRead);
1712-
} else {
1713-
namenodes = namenodeResolver.getNamenodesForNameserviceId(nsId, false);
1732+
boolean listObserverNamenodesFirst = isObserverRead
1733+
&& isNamespaceStateIdFresh(nsId)
1734+
&& (RouterStateIdContext.getClientStateIdFromCurrentCall(nsId) > Long.MIN_VALUE);
1735+
namenodes = namenodeResolver.getNamenodesForNameserviceId(nsId, listObserverNamenodesFirst);
1736+
if (!listObserverNamenodesFirst) {
1737+
// Refresh time of last call to active NameNode.
1738+
getTimeOfLastCallToActive(nsId).accumulate(Time.monotonicNow());
17141739
}
17151740

17161741
if (namenodes == null || namenodes.isEmpty()) {
@@ -1721,7 +1746,8 @@ private List<? extends FederationNamenodeContext> getOrderedNamenodes(String nsI
17211746
}
17221747

17231748
private boolean isObserverReadEligible(String nsId, Method method) {
1724-
boolean isReadEnabledForNamespace = observerReadEnabledDefault != observerReadEnabledOverrides.contains(nsId);
1749+
boolean isReadEnabledForNamespace =
1750+
observerReadEnabledDefault != observerReadEnabledOverrides.contains(nsId);
17251751
return isReadEnabledForNamespace && isReadCall(method);
17261752
}
17271753

@@ -1735,4 +1761,24 @@ private static boolean isReadCall(Method method) {
17351761
}
17361762
return !method.getAnnotationsByType(ReadOnly.class)[0].activeOnly();
17371763
}
1764+
1765+
/**
1766+
* Checks and sets last refresh time for a namespace's stateId.
1767+
* Returns true if refresh time is newer than threshold.
1768+
* Otherwise, return false and call should be handled by active namenode.
1769+
* @param nsId namespaceID
1770+
*/
1771+
@VisibleForTesting
1772+
boolean isNamespaceStateIdFresh(String nsId) {
1773+
if (activeNNStateIdRefreshPeriodMs < 0) {
1774+
return true;
1775+
}
1776+
long timeSinceRefreshMs = Time.monotonicNow() - getTimeOfLastCallToActive(nsId).get();
1777+
return (timeSinceRefreshMs <= activeNNStateIdRefreshPeriodMs);
1778+
}
1779+
1780+
private LongAccumulator getTimeOfLastCallToActive(String namespaceId) {
1781+
return lastActiveNNRefreshTimes
1782+
.computeIfAbsent(namespaceId, key -> new LongAccumulator(Math::max, 0));
1783+
}
17381784
}

0 commit comments

Comments (0)