
Commit d1547af

Merge branch 'apache:trunk' into YARN-11226-V3
2 parents 69ac19e + 6a07b5d

18 files changed, +205 -28 lines

LICENSE-binary

Lines changed: 1 addition & 1 deletion
@@ -251,7 +251,7 @@ commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13
 commons-io:commons-io:2.8.0
 commons-logging:commons-logging:1.1.3
-commons-net:commons-net:3.8.0
+commons-net:commons-net:3.9.0
 de.ruedigermoeller:fst:2.50
 io.grpc:grpc-api:1.26.0
 io.grpc:grpc-context:1.26.0

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsStoreImpl.java

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ public long incrementCounter(final String key, final long value) {
       return counter.get();
     } else {
       long l = incAtomicLong(counter, value);
-      LOG.debug("Incrementing counter {} by {} with final value {}",
+      LOG.trace("Incrementing counter {} by {} with final value {}",
           key, value, l);
       return l;
     }
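
For context, a hedged sketch of the hot path this change quiets (the builder calls and counter name below are assumptions drawn from the IOStatistics API, not from this diff): every incrementCounter call on a store reaches this branch, so logging each increment at debug was noisy, while trace keeps the detail available on demand.

    import org.apache.hadoop.fs.statistics.impl.IOStatisticsStore;
    import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.iostatisticsStore;

    IOStatisticsStore store = iostatisticsStore()
        .withCounters("stream_read_bytes")   // hypothetical counter name
        .build();
    // Each increment now logs at trace rather than debug.
    store.incrementCounter("stream_read_bytes", 1024);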

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java

Lines changed: 13 additions & 3 deletions
@@ -262,9 +262,15 @@ public LogAction record(String recorderName, long currentTimeMs,
     if (primaryRecorderName.equals(recorderName) &&
         currentTimeMs - minLogPeriodMs >= lastLogTimestampMs) {
       lastLogTimestampMs = currentTimeMs;
-      for (LoggingAction log : currentLogs.values()) {
-        log.setShouldLog();
-      }
+      currentLogs.replaceAll((key, log) -> {
+        LoggingAction newLog = log;
+        if (log.hasLogged()) {
+          // create a fresh log since the old one has already been logged
+          newLog = new LoggingAction(log.getValueCount());
+        }
+        newLog.setShouldLog();
+        return newLog;
+      });
     }
     if (currentLog.shouldLog()) {
       currentLog.setHasLogged();
@@ -357,6 +363,10 @@ private void setHasLogged() {
       hasLogged = true;
     }

+    private int getValueCount() {
+      return stats.length;
+    }
+
     private void recordValues(double... values) {
       if (values.length != stats.length) {
         throw new IllegalArgumentException("received " + values.length +
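
A hedged usage sketch of the behavior this hunk fixes, using only the API visible in this commit ("foo" is the primary recorder, "bar" a dependent one; LOG_PERIOD and timer are borrowed from the test file added below). Before the change, a dependent LoggingAction that had already logged was reused as-is when the primary reset the period, so the dependent logger could stay silent:

    LogThrottlingHelper helper = new LogThrottlingHelper(LOG_PERIOD, "foo", timer);
    helper.record("foo", 0);                       // primary establishes the period
    helper.record("bar", 0).shouldLog();           // dependent logs; hasLogged = true
    // One full period later, the primary resets all recorders. With this fix,
    // an already-logged dependent gets a fresh LoggingAction, so both log again.
    helper.record("foo", LOG_PERIOD).shouldLog();  // true
    helper.record("bar", LOG_PERIOD).shouldLog();  // true; previously could be false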

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java

Lines changed: 7 additions & 4 deletions
@@ -38,6 +38,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import static org.apache.hadoop.util.Shell.bashQuote;
+
 /**
  * A simple shell-based implementation of {@link IdMappingServiceProvider}
  * Map id to user name or group name. It does update every 15 minutes. Only a
@@ -472,26 +474,27 @@ synchronized private void updateMapIncr(final String name,

     boolean updated = false;
     updateStaticMapping();
+    String name2 = bashQuote(name);

     if (OS.startsWith("Linux") || OS.equals("SunOS") || OS.contains("BSD")) {
       if (isGrp) {
         updated = updateMapInternal(gidNameMap, "group",
-            getName2IdCmdNIX(name, true), ":",
+            getName2IdCmdNIX(name2, true), ":",
             staticMapping.gidMapping);
       } else {
         updated = updateMapInternal(uidNameMap, "user",
-            getName2IdCmdNIX(name, false), ":",
+            getName2IdCmdNIX(name2, false), ":",
             staticMapping.uidMapping);
       }
     } else {
       // Mac
       if (isGrp) {
         updated = updateMapInternal(gidNameMap, "group",
-            getName2IdCmdMac(name, true), "\\s+",
+            getName2IdCmdMac(name2, true), "\\s+",
             staticMapping.gidMapping);
       } else {
         updated = updateMapInternal(uidNameMap, "user",
-            getName2IdCmdMac(name, false), "\\s+",
+            getName2IdCmdMac(name2, false), "\\s+",
             staticMapping.uidMapping);
       }
     }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

Lines changed: 2 additions & 1 deletion
@@ -146,7 +146,8 @@ public static void checkWindowsCommandLineLength(String...commands)
    * @param arg the argument to quote
    * @return the quoted string
    */
-  static String bashQuote(String arg) {
+  @InterfaceAudience.Private
+  public static String bashQuote(String arg) {
     StringBuilder buffer = new StringBuilder(arg.length() + 2);
     buffer.append('\'')
         .append(arg.replace("'", "'\\''"))
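
A hedged illustration of what bashQuote guarantees for the ShellBasedIdMapping caller above (the input string is made up): the argument is wrapped in single quotes, and every embedded single quote is rewritten as '\'' so the shell treats the whole value as one literal word.

    // input (Java literal):  o'brien; rm -rf /
    String quoted = Shell.bashQuote("o'brien; rm -rf /");
    // quoted now holds:      'o'\''brien; rm -rf /'
    // bash parses that back to the literal string o'brien; rm -rf /,
    // so the ";" can no longer terminate the id/groups command line.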

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java

Lines changed: 30 additions & 4 deletions
@@ -29,6 +29,8 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;

 import java.io.*;
@@ -41,6 +43,9 @@
 @InterfaceStability.Unstable
 public class XMLUtils {

+  private static final Logger LOG =
+      LoggerFactory.getLogger(XMLUtils.class);
+
   public static final String DISALLOW_DOCTYPE_DECL =
       "http://apache.org/xml/features/disallow-doctype-decl";
   public static final String LOAD_EXTERNAL_DECL =
@@ -138,8 +143,8 @@ public static TransformerFactory newSecureTransformerFactory()
       throws TransformerConfigurationException {
     TransformerFactory trfactory = TransformerFactory.newInstance();
     trfactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
-    trfactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
-    trfactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
+    bestEffortSetAttribute(trfactory, XMLConstants.ACCESS_EXTERNAL_DTD, "");
+    bestEffortSetAttribute(trfactory, XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
     return trfactory;
   }

@@ -156,8 +161,29 @@ public static SAXTransformerFactory newSecureSAXTransformerFactory()
       throws TransformerConfigurationException {
     SAXTransformerFactory trfactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
     trfactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
-    trfactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
-    trfactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
+    bestEffortSetAttribute(trfactory, XMLConstants.ACCESS_EXTERNAL_DTD, "");
+    bestEffortSetAttribute(trfactory, XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
     return trfactory;
   }
+
+  /**
+   * Set an attribute value on a {@link TransformerFactory}. If the TransformerFactory
+   * does not support the attribute, the method just returns <code>false</code> and
+   * logs the issue at debug level.
+   *
+   * @param transformerFactory to update
+   * @param name of the attribute to set
+   * @param value to set on the attribute
+   * @return whether the attribute was successfully set
+   */
+  static boolean bestEffortSetAttribute(TransformerFactory transformerFactory,
+      String name, Object value) {
+    try {
+      transformerFactory.setAttribute(name, value);
+      return true;
+    } catch (Throwable t) {
+      LOG.debug("Issue setting TransformerFactory attribute {}: {}", name, t.toString());
+    }
+    return false;
+  }
 }
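
A short, hedged usage sketch (the demo class name is an assumption): callers of the secure factories are unchanged; what the best-effort setter buys them is that a third-party JAXP TransformerFactory that rejects the ACCESS_EXTERNAL_* attributes no longer breaks factory construction.

    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerConfigurationException;
    import org.apache.hadoop.util.XMLUtils;

    public class SecureTransformDemo {
      public static void main(String[] args) throws TransformerConfigurationException {
        // Previously this could fail on a JAXP implementation that does not
        // recognize ACCESS_EXTERNAL_DTD; now the unsupported attribute is
        // logged at debug level and skipped.
        Transformer t = XMLUtils.newSecureTransformerFactory().newTransformer();
        System.out.println("secure transformer created: " + (t != null));
      }
    }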

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogThrottlingHelper.java

Lines changed: 12 additions & 0 deletions
@@ -142,6 +142,18 @@ public void testPrimaryAndDependentLoggers() {
     assertTrue(helper.record("bar", 0).shouldLog());
   }

+  @Test
+  public void testInfrequentPrimaryAndDependentLoggers() {
+    helper = new LogThrottlingHelper(LOG_PERIOD, "foo", timer);
+
+    assertTrue(helper.record("foo", 0).shouldLog());
+    assertTrue(helper.record("bar", 0).shouldLog());
+
+    // Both should log once the period has elapsed
+    assertTrue(helper.record("foo", LOG_PERIOD).shouldLog());
+    assertTrue(helper.record("bar", LOG_PERIOD).shouldLog());
+  }
+
   @Test
   public void testMultipleLoggersWithValues() {
     helper = new LogThrottlingHelper(LOG_PERIOD, "foo", timer);

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestXMLUtils.java

Lines changed: 12 additions & 0 deletions
@@ -20,17 +20,20 @@
 import java.io.InputStream;
 import java.io.StringReader;
 import java.io.StringWriter;
+import javax.xml.XMLConstants;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.SAXParser;
 import javax.xml.transform.Transformer;
 import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 import javax.xml.transform.stream.StreamSource;

 import org.apache.hadoop.test.AbstractHadoopTestBase;

 import org.assertj.core.api.Assertions;
+import org.junit.Assert;
 import org.junit.Test;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
@@ -128,6 +131,15 @@ public void testExternalDtdWithSecureSAXTransformerFactory() throws Exception {
     }
   }

+  @Test
+  public void testBestEffortSetAttribute() throws Exception {
+    TransformerFactory factory = TransformerFactory.newInstance();
+    Assert.assertFalse("unexpected attribute results in return of false",
+        XMLUtils.bestEffortSetAttribute(factory, "unsupportedAttribute false", "abc"));
+    Assert.assertTrue("expected attribute results in return of true",
+        XMLUtils.bestEffortSetAttribute(factory, XMLConstants.ACCESS_EXTERNAL_DTD, ""));
+  }
+
   private static InputStream getResourceStream(final String filename) {
     return TestXMLUtils.class.getResourceAsStream(filename);
   }

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/KeyProviderCache.java

Lines changed: 5 additions & 2 deletions
@@ -68,8 +68,11 @@ public void onRemoval(
         })
         .build();

-    ShutdownHookManager.get().addShutdownHook(new KeyProviderCacheFinalizer(),
-        SHUTDOWN_HOOK_PRIORITY);
+    // Register the shutdown hook when not in shutdown
+    if (!ShutdownHookManager.get().isShutdownInProgress()) {
+      ShutdownHookManager.get().addShutdownHook(
+          new KeyProviderCacheFinalizer(), SHUTDOWN_HOOK_PRIORITY);
+    }
   }

   public KeyProvider get(final Configuration conf,
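
The general pattern, as a hedged sketch ("myHook" and "PRIORITY" are placeholders, not names from this commit): code that can itself run while the JVM is shutting down must not register shutdown hooks unconditionally, because ShutdownHookManager rejects registrations once shutdown has begun.

    // Probe first; addShutdownHook throws IllegalStateException if a
    // shutdown is already in progress, which this guard avoids.
    if (!ShutdownHookManager.get().isShutdownInProgress()) {
      ShutdownHookManager.get().addShutdownHook(myHook, PRIORITY);
    }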

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java

Lines changed: 2 additions & 2 deletions
@@ -708,12 +708,12 @@ public String toString() {

   Result newResult(ExitStatus exitStatus, long bytesLeftToMove, long bytesBeingMoved) {
     return new Result(exitStatus, bytesLeftToMove, bytesBeingMoved,
-        dispatcher.getBytesMoved(), dispatcher.getBblocksMoved());
+        dispatcher.getBytesMoved(), dispatcher.getBlocksMoved());
   }

   Result newResult(ExitStatus exitStatus) {
     return new Result(exitStatus, -1, -1, dispatcher.getBytesMoved(),
-        dispatcher.getBblocksMoved());
+        dispatcher.getBlocksMoved());
   }

   /** Run an iteration for all datanodes. */

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java

Lines changed: 4 additions & 4 deletions
@@ -164,7 +164,7 @@ synchronized int allocate(int n) {
     }
   }

-  /** Aloocate a single lot of items */
+  /** Allocate a single lot of items. */
   int allocate() {
     return allocate(lotSize);
   }
@@ -1127,7 +1127,7 @@ long getBytesMoved() {
     return nnc.getBytesMoved().get();
   }

-  long getBblocksMoved() {
+  long getBlocksMoved() {
     return nnc.getBlocksMoved().get();
   }

@@ -1234,7 +1234,7 @@ public boolean dispatchAndCheckContinue() throws InterruptedException {
    */
   private long dispatchBlockMoves() throws InterruptedException {
     final long bytesLastMoved = getBytesMoved();
-    final long blocksLastMoved = getBblocksMoved();
+    final long blocksLastMoved = getBlocksMoved();
     final Future<?>[] futures = new Future<?>[sources.size()];

     int concurrentThreads = Math.min(sources.size(),
@@ -1284,7 +1284,7 @@ public void run() {
     waitForMoveCompletion(targets);
     LOG.info("Total bytes (blocks) moved in this iteration {} ({})",
         StringUtils.byteDesc(getBytesMoved() - bytesLastMoved),
-        (getBblocksMoved() - blocksLastMoved));
+        (getBlocksMoved() - blocksLastMoved));

     return getBytesMoved() - bytesLastMoved;
   }

hadoop-project/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@
     <commons-logging.version>1.1.3</commons-logging.version>
     <commons-logging-api.version>1.1</commons-logging-api.version>
     <commons-math3.version>3.6.1</commons-math3.version>
-    <commons-net.version>3.8.0</commons-net.version>
+    <commons-net.version>3.9.0</commons-net.version>
     <commons-text.version>1.10.0</commons-text.version>

    <kerby.version>2.0.2</kerby.version>

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java

Lines changed: 7 additions & 0 deletions
@@ -117,6 +117,7 @@
 import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_BLOCK_UPLOAD_BUFFER_DIR;
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.BLOCK_UPLOAD_ACTIVE_BLOCKS_DEFAULT;
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DATA_BLOCKS_BUFFER_DEFAULT;
+import static org.apache.hadoop.fs.azurebfs.constants.InternalConstants.CAPABILITY_SAFE_READAHEAD;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 import static org.apache.hadoop.fs.statistics.IOStatisticsLogging.logIOStatisticsAtLevel;
 import static org.apache.hadoop.util.functional.RemoteIterators.filteringRemoteIterator;
@@ -235,6 +236,7 @@ public String toString() {
     sb.append("uri=").append(uri);
     sb.append(", user='").append(abfsStore.getUser()).append('\'');
     sb.append(", primaryUserGroup='").append(abfsStore.getPrimaryGroup()).append('\'');
+    sb.append("[" + CAPABILITY_SAFE_READAHEAD + "]");
     sb.append('}');
     return sb.toString();
   }
@@ -1636,6 +1638,11 @@ public boolean hasPathCapability(final Path path, final String capability)
           new TracingContext(clientCorrelationId, fileSystemId,
               FSOperationType.HAS_PATH_CAPABILITY, tracingHeaderFormat,
               listener));
+
+    // probe for presence of the HADOOP-18546 readahead fix.
+    case CAPABILITY_SAFE_READAHEAD:
+      return true;
+
     default:
       return super.hasPathCapability(p, capability);
     }
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/InternalConstants.java

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.constants;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Constants which are used internally and which don't fit into the other
+ * classes.
+ * For use within the {@code hadoop-azure} module only.
+ */
+@InterfaceAudience.Private
+public final class InternalConstants {
+
+  private InternalConstants() {
+  }
+
+  /**
+   * Does this version of the store have safe readahead?
+   * Possible combinations of this and the probe
+   * {@code "fs.capability.etags.available"}.
+   * <ol>
+   *   <li>{@value}: store is safe</li>
+   *   <li>no etags: store is safe</li>
+   *   <li>etags and not {@value}: store is <i>UNSAFE</i></li>
+   * </ol>
+   */
+  public static final String CAPABILITY_SAFE_READAHEAD =
+      "fs.azure.capability.readahead.safe";
+}
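
A hedged sketch of how a client could combine the two probes described in the javadoc above (the Path and Configuration values are placeholders; hasPathCapability is the public FileSystem API extended in this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Placeholder: any abfs:// path the caller already works with.
    Path path = new Path("abfs://container@account.dfs.core.windows.net/data");
    FileSystem fs = path.getFileSystem(new Configuration());

    boolean etags = fs.hasPathCapability(path, "fs.capability.etags.available");
    boolean safeReadahead =
        fs.hasPathCapability(path, "fs.azure.capability.readahead.safe");
    // Per the list above, only "etags && !safeReadahead" marks an unsafe store.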

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsInputStream.java

Lines changed: 5 additions & 3 deletions
@@ -50,6 +50,7 @@

 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.ONE_KB;
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.STREAM_ID_LEN;
+import static org.apache.hadoop.fs.azurebfs.constants.InternalConstants.CAPABILITY_SAFE_READAHEAD;
 import static org.apache.hadoop.util.StringUtils.toLowerCase;

 /**
@@ -828,11 +829,12 @@ public IOStatistics getIOStatistics() {
   @Override
   public String toString() {
     final StringBuilder sb = new StringBuilder(super.toString());
+    sb.append("AbfsInputStream@(").append(this.hashCode()).append("){");
+    sb.append("[" + CAPABILITY_SAFE_READAHEAD + "]");
     if (streamStatistics != null) {
-      sb.append("AbfsInputStream@(").append(this.hashCode()).append("){");
-      sb.append(streamStatistics.toString());
-      sb.append("}");
+      sb.append(", ").append(streamStatistics);
     }
+    sb.append("}");
     return sb.toString();
   }

hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferManager.java

Lines changed: 1 addition & 0 deletions
@@ -101,6 +101,7 @@ private void init() {

   // hide instance constructor
   private ReadBufferManager() {
+    LOGGER.trace("Creating readbuffer manager with HADOOP-18546 patch");
   }
