Skip to content

Commit e93e92d

Browse files
authored
Merge branch 'apache:trunk' into YARN-11313
2 parents d8df8ba + 4891bf5 commit e93e92d

File tree

71 files changed

+1970
-471
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

71 files changed

+1970
-471
lines changed

LICENSE-binary

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -362,7 +362,7 @@ org.ehcache:ehcache:3.3.1
362362
org.lz4:lz4-java:1.7.1
363363
org.objenesis:objenesis:2.6
364364
org.xerial.snappy:snappy-java:1.0.5
365-
org.yaml:snakeyaml:1.31
365+
org.yaml:snakeyaml:1.32
366366
org.wildfly.openssl:wildfly-openssl:1.0.7.Final
367367

368368

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ static void writeResponse(Configuration conf,
9898
if (FORMAT_JSON.equals(format)) {
9999
Configuration.dumpConfiguration(conf, propertyName, out);
100100
} else if (FORMAT_XML.equals(format)) {
101-
conf.writeXml(propertyName, out);
101+
conf.writeXml(propertyName, out, conf);
102102
} else {
103103
throw new BadFormatException("Bad format: " + format);
104104
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
public class ConfigRedactor {
3838

3939
private static final String REDACTED_TEXT = "<redacted>";
40+
private static final String REDACTED_XML = "******";
4041

4142
private List<Pattern> compiledPatterns;
4243

@@ -84,4 +85,19 @@ private boolean configIsSensitive(String key) {
8485
}
8586
return false;
8687
}
88+
89+
/**
90+
* Given a key / value pair, decides whether or not to redact and returns
91+
* either the original value or text indicating it has been redacted.
92+
*
93+
* @param key param key.
94+
* @param value param value, will return if conditions permit.
95+
* @return Original value, or text indicating it has been redacted
96+
*/
97+
public String redactXml(String key, String value) {
98+
if (configIsSensitive(key)) {
99+
return REDACTED_XML;
100+
}
101+
return value;
102+
}
87103
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

Lines changed: 19 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3593,11 +3593,13 @@ public void writeXml(Writer out) throws IOException {
35933593
* </ul>
35943594
* @param propertyName xml property name.
35953595
* @param out the writer to write to.
3596+
* @param config configuration.
35963597
* @throws IOException raised on errors performing I/O.
35973598
*/
3598-
public void writeXml(@Nullable String propertyName, Writer out)
3599+
public void writeXml(@Nullable String propertyName, Writer out, Configuration config)
35993600
throws IOException, IllegalArgumentException {
3600-
Document doc = asXmlDocument(propertyName);
3601+
ConfigRedactor redactor = config != null ? new ConfigRedactor(this) : null;
3602+
Document doc = asXmlDocument(propertyName, redactor);
36013603

36023604
try {
36033605
DOMSource source = new DOMSource(doc);
@@ -3614,11 +3616,16 @@ public void writeXml(@Nullable String propertyName, Writer out)
36143616
}
36153617
}
36163618

3619+
public void writeXml(@Nullable String propertyName, Writer out)
3620+
throws IOException, IllegalArgumentException {
3621+
writeXml(propertyName, out, null);
3622+
}
3623+
36173624
/**
36183625
* Return the XML DOM corresponding to this Configuration.
36193626
*/
3620-
private synchronized Document asXmlDocument(@Nullable String propertyName)
3621-
throws IOException, IllegalArgumentException {
3627+
private synchronized Document asXmlDocument(@Nullable String propertyName,
3628+
ConfigRedactor redactor) throws IOException, IllegalArgumentException {
36223629
Document doc;
36233630
try {
36243631
doc = DocumentBuilderFactory
@@ -3641,13 +3648,13 @@ private synchronized Document asXmlDocument(@Nullable String propertyName)
36413648
propertyName + " not found");
36423649
} else {
36433650
// given property is found, write single property
3644-
appendXMLProperty(doc, conf, propertyName);
3651+
appendXMLProperty(doc, conf, propertyName, redactor);
36453652
conf.appendChild(doc.createTextNode("\n"));
36463653
}
36473654
} else {
36483655
// append all elements
36493656
for (Enumeration<Object> e = properties.keys(); e.hasMoreElements();) {
3650-
appendXMLProperty(doc, conf, (String)e.nextElement());
3657+
appendXMLProperty(doc, conf, (String)e.nextElement(), redactor);
36513658
conf.appendChild(doc.createTextNode("\n"));
36523659
}
36533660
}
@@ -3663,7 +3670,7 @@ private synchronized Document asXmlDocument(@Nullable String propertyName)
36633670
* @param propertyName
36643671
*/
36653672
private synchronized void appendXMLProperty(Document doc, Element conf,
3666-
String propertyName) {
3673+
String propertyName, ConfigRedactor redactor) {
36673674
// skip writing if given property name is empty or null
36683675
if (!Strings.isNullOrEmpty(propertyName)) {
36693676
String value = properties.getProperty(propertyName);
@@ -3676,8 +3683,11 @@ private synchronized void appendXMLProperty(Document doc, Element conf,
36763683
propNode.appendChild(nameNode);
36773684

36783685
Element valueNode = doc.createElement("value");
3679-
valueNode.appendChild(doc.createTextNode(
3680-
properties.getProperty(propertyName)));
3686+
String propertyValue = properties.getProperty(propertyName);
3687+
if (redactor != null) {
3688+
propertyValue = redactor.redactXml(propertyName, propertyValue);
3689+
}
3690+
valueNode.appendChild(doc.createTextNode(propertyValue));
36813691
propNode.appendChild(valueNode);
36823692

36833693
Element finalNode = doc.createElement("final");

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1000,6 +1000,7 @@ public class CommonConfigurationKeysPublic {
10001000
String.join(",",
10011001
"secret$",
10021002
"password$",
1003+
"username$",
10031004
"ssl.keystore.pass$",
10041005
"fs.s3.*[Ss]ecret.?[Kk]ey",
10051006
"fs.s3a.*.server-side-encryption.key",

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StreamCapabilities.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ public interface StreamCapabilities {
8484
* Support for vectored IO api.
8585
* See {@code PositionedReadable#readVectored(List, IntFunction)}.
8686
*/
87-
String VECTOREDIO = "readvectored";
87+
String VECTOREDIO = "in:readvectored";
8888

8989
/**
9090
* Stream abort() capability implemented by {@link Abortable#abort()}.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,10 @@
2323
import org.apache.hadoop.classification.InterfaceStability;
2424
import org.apache.hadoop.conf.Configuration;
2525
import org.apache.hadoop.conf.Configured;
26+
import org.apache.hadoop.fs.viewfs.ViewFileSystem;
2627
import org.slf4j.Logger;
2728
import org.slf4j.LoggerFactory;
29+
import static org.apache.hadoop.fs.viewfs.Constants.*;
2830

2931
/**
3032
* Provides a trash facility which supports pluggable Trash policies.
@@ -94,6 +96,27 @@ public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
9496
LOG.warn("Failed to get server trash configuration", e);
9597
throw new IOException("Failed to get server trash configuration", e);
9698
}
99+
100+
/*
101+
* In HADOOP-18144, we changed getTrashRoot() in ViewFileSystem to return a
102+
* viewFS path, instead of a targetFS path. moveToTrash works for
103+
* ViewFileSystem now. ViewFileSystem will do path resolution internally by
104+
* itself.
105+
*
106+
* When localized trash flag is enabled:
107+
* 1). if fs is a ViewFileSystem, we can initialize Trash() with a
108+
* ViewFileSystem object;
109+
* 2). When fs is not a ViewFileSystem, the only place we would need to
110+
* resolve a path is for symbolic links. However, symlink is not
111+
* enabled in Hadoop due to the complexity to support it
112+
* (HADOOP-10019).
113+
*/
114+
if (conf.getBoolean(CONFIG_VIEWFS_TRASH_FORCE_INSIDE_MOUNT_POINT,
115+
CONFIG_VIEWFS_TRASH_FORCE_INSIDE_MOUNT_POINT_DEFAULT)) {
116+
Trash trash = new Trash(fs, conf);
117+
return trash.moveToTrash(p);
118+
}
119+
97120
Trash trash = new Trash(fullyResolvedFs, conf);
98121
return trash.moveToTrash(fullyResolvedPath);
99122
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WeakReferenceThreadMap.java

Lines changed: 33 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@
2525

2626
import org.apache.hadoop.util.WeakReferenceMap;
2727

28+
import static java.util.Objects.requireNonNull;
29+
2830
/**
2931
* A WeakReferenceMap for threads.
3032
* @param <V> value type of the map
@@ -36,30 +38,55 @@ public WeakReferenceThreadMap(final Function<? super Long, ? extends V> factory,
3638
super(factory, referenceLost);
3739
}
3840

41+
/**
42+
* Get the value for the current thread, creating if needed.
43+
* @return an instance.
44+
*/
3945
public V getForCurrentThread() {
4046
return get(currentThreadId());
4147
}
4248

49+
/**
50+
* Remove the reference for the current thread.
51+
* @return any reference value which existed.
52+
*/
4353
public V removeForCurrentThread() {
4454
return remove(currentThreadId());
4555
}
4656

57+
/**
58+
* Get the current thread ID.
59+
* @return thread ID.
60+
*/
4761
public long currentThreadId() {
4862
return Thread.currentThread().getId();
4963
}
5064

65+
/**
66+
* Set the new value for the current thread.
67+
* @param newVal new reference to set for the active thread.
68+
* @return the previously set value, possibly null
69+
*/
5170
public V setForCurrentThread(V newVal) {
71+
requireNonNull(newVal);
5272
long id = currentThreadId();
5373

5474
// if the same object is already in the map, just return it.
55-
WeakReference<V> ref = lookup(id);
56-
// Reference value could be set to null. Thus, ref.get() could return
57-
// null. Should be handled accordingly while using the returned value.
58-
if (ref != null && ref.get() == newVal) {
59-
return ref.get();
75+
WeakReference<V> existingWeakRef = lookup(id);
76+
77+
// The looked up reference could be one of
78+
// 1. null: nothing there
79+
// 2. valid but get() == null : reference lost by GC.
80+
// 3. different from the new value
81+
// 4. the same as the old value
82+
if (resolve(existingWeakRef) == newVal) {
83+
// case 4: do nothing, return the new value
84+
return newVal;
85+
} else {
86+
// cases 1, 2, 3: update the map and return the old value
87+
return put(id, newVal);
6088
}
6189

62-
return put(id, newVal);
6390
}
6491

6592
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsContext.java

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@
2020

2121
import org.apache.hadoop.fs.statistics.impl.IOStatisticsContextIntegration;
2222

23+
import static java.util.Objects.requireNonNull;
24+
2325
/**
2426
* An interface defined to capture thread-level IOStatistics by using per
2527
* thread context.
@@ -67,7 +69,11 @@ public interface IOStatisticsContext extends IOStatisticsSource {
6769
* @return instance of IOStatisticsContext for the context.
6870
*/
6971
static IOStatisticsContext getCurrentIOStatisticsContext() {
70-
return IOStatisticsContextIntegration.getCurrentIOStatisticsContext();
72+
// the null check is just a safety check to highlight exactly where a null value would
73+
// be returned if HADOOP-18456 has resurfaced.
74+
return requireNonNull(
75+
IOStatisticsContextIntegration.getCurrentIOStatisticsContext(),
76+
"Null IOStatisticsContext");
7177
}
7278

7379
/**

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsContextIntegration.java

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,10 @@ private IOStatisticsContextIntegration() {}
100100
* @return an instance of IOStatisticsContext.
101101
*/
102102
private static IOStatisticsContext createNewInstance(Long key) {
103-
return new IOStatisticsContextImpl(key, INSTANCE_ID.getAndIncrement());
103+
IOStatisticsContextImpl instance =
104+
new IOStatisticsContextImpl(key, INSTANCE_ID.getAndIncrement());
105+
LOG.debug("Created instance {}", instance);
106+
return instance;
104107
}
105108

106109
/**
@@ -131,9 +134,11 @@ public static void setThreadIOStatisticsContext(
131134
IOStatisticsContext statisticsContext) {
132135
if (isThreadIOStatsEnabled) {
133136
if (statisticsContext == null) {
137+
// new value is null, so remove it
134138
ACTIVE_IOSTATS_CONTEXT.removeForCurrentThread();
135-
}
136-
if (ACTIVE_IOSTATS_CONTEXT.getForCurrentThread() != statisticsContext) {
139+
} else {
140+
// the setter is efficient in that it does not create a new
141+
// reference if the context is unchanged.
137142
ACTIVE_IOSTATS_CONTEXT.setForCurrentThread(statisticsContext);
138143
}
139144
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,12 +124,28 @@ public static void setCallIdAndRetryCount(int cid, int rc,
124124
Preconditions.checkArgument(cid != RpcConstants.INVALID_CALL_ID);
125125
Preconditions.checkState(callId.get() == null);
126126
Preconditions.checkArgument(rc != RpcConstants.INVALID_RETRY_COUNT);
127+
setCallIdAndRetryCountUnprotected(cid, rc, externalHandler);
128+
}
127129

130+
public static void setCallIdAndRetryCountUnprotected(Integer cid, int rc,
131+
Object externalHandler) {
128132
callId.set(cid);
129133
retryCount.set(rc);
130134
EXTERNAL_CALL_HANDLER.set(externalHandler);
131135
}
132136

137+
public static int getCallId() {
138+
return callId.get() != null ? callId.get() : nextCallId();
139+
}
140+
141+
public static int getRetryCount() {
142+
return retryCount.get() != null ? retryCount.get() : 0;
143+
}
144+
145+
public static Object getExternalHandler() {
146+
return EXTERNAL_CALL_HANDLER.get();
147+
}
148+
133149
private final ConcurrentMap<ConnectionId, Connection> connections =
134150
new ConcurrentHashMap<>();
135151
private final Object putLock = new Object();

0 commit comments

Comments (0)