Skip to content

Commit b7cfc3f

Browse files
authored
Merge branch 'apache:trunk' into YARN-11509
2 parents f75048c + 8fb611c commit b7cfc3f

File tree

38 files changed

+106622
-122
lines changed

38 files changed

+106622
-122
lines changed

LICENSE-binary

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -337,7 +337,7 @@ org.apache.kerby:kerby-pkix:2.0.3
337337
org.apache.kerby:kerby-util:2.0.3
338338
org.apache.kerby:kerby-xdr:2.0.3
339339
org.apache.kerby:token-provider:2.0.3
340-
org.apache.solr:solr-solrj:8.8.2
340+
org.apache.solr:solr-solrj:8.11.2
341341
org.apache.yetus:audience-annotations:0.5.0
342342
org.apache.zookeeper:zookeeper:3.6.3
343343
org.codehaus.jettison:jettison:1.5.4
@@ -361,7 +361,7 @@ org.jetbrains.kotlin:kotlin-stdlib:1.4.10
361361
org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
362362
org.lz4:lz4-java:1.7.1
363363
org.objenesis:objenesis:2.6
364-
org.xerial.snappy:snappy-java:1.0.5
364+
org.xerial.snappy:snappy-java:1.1.10.1
365365
org.yaml:snakeyaml:2.0
366366
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
367367

hadoop-common-project/hadoop-common/dev-support/jdiff/Apache_Hadoop_Common_3.3.6.xml

Lines changed: 40994 additions & 0 deletions
Large diffs are not rendered by default.

hadoop-common-project/hadoop-common/src/main/conf/log4j.properties

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,21 @@ log4j.appender.NMAUDIT.layout.ConversionPattern=%d{ISO8601}%p %c{2}: %m%n
232232
log4j.appender.NMAUDIT.MaxFileSize=${nm.audit.log.maxfilesize}
233233
log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex}
234234

235+
#
236+
# YARN Router audit logging
237+
#
238+
router.audit.logger=INFO,NullAppender
239+
router.audit.log.maxfilesize=256MB
240+
router.audit.log.maxbackupindex=20
241+
log4j.logger.org.apache.hadoop.yarn.server.router.RouterAuditLogger=${router.audit.logger}
242+
log4j.additivity.org.apache.hadoop.yarn.server.router.RouterAuditLogger=false
243+
log4j.appender.ROUTERAUDIT=org.apache.log4j.RollingFileAppender
244+
log4j.appender.ROUTERAUDIT.File=${hadoop.log.dir}/router-audit.log
245+
log4j.appender.ROUTERAUDIT.layout=org.apache.log4j.PatternLayout
246+
log4j.appender.ROUTERAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
247+
log4j.appender.ROUTERAUDIT.MaxFileSize=${router.audit.log.maxfilesize}
248+
log4j.appender.ROUTERAUDIT.MaxBackupIndex=${router.audit.log.maxbackupindex}
249+
235250
# HS audit log configs
236251
#mapreduce.hs.audit.logger=INFO,HSAUDIT
237252
#log4j.logger.org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger=${mapreduce.hs.audit.logger}

hadoop-common-project/hadoop-common/src/main/resources/core-default.xml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2165,9 +2165,11 @@ The switch to turn S3A auditing on or off.
21652165

21662166
<property>
21672167
<name>fs.azure.buffer.dir</name>
2168-
<value>${hadoop.tmp.dir}/abfs</value>
2168+
<value>${env.LOCAL_DIRS:-${hadoop.tmp.dir}}/abfs</value>
21692169
<description>Directory path for buffer files needed to upload data blocks
2170-
in AbfsOutputStream.</description>
2170+
in AbfsOutputStream.
2171+
Yarn container path will be used as default value on yarn applications,
2172+
otherwise fall back to hadoop.tmp.dir </description>
21712173
</property>
21722174

21732175
<property>

hadoop-common-project/hadoop-common/src/site/markdown/release/3.3.6/CHANGELOG.3.3.6.md

Lines changed: 176 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
2+
<!---
3+
# Licensed to the Apache Software Foundation (ASF) under one
4+
# or more contributor license agreements. See the NOTICE file
5+
# distributed with this work for additional information
6+
# regarding copyright ownership. The ASF licenses this file
7+
# to you under the Apache License, Version 2.0 (the
8+
# "License"); you may not use this file except in compliance
9+
# with the License. You may obtain a copy of the License at
10+
#
11+
# http://www.apache.org/licenses/LICENSE-2.0
12+
#
13+
# Unless required by applicable law or agreed to in writing, software
14+
# distributed under the License is distributed on an "AS IS" BASIS,
15+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16+
# See the License for the specific language governing permissions and
17+
# limitations under the License.
18+
-->
19+
# Apache Hadoop 3.3.6 Release Notes
20+
21+
These release notes cover new developer and user-facing incompatibilities, important issues, features, and major improvements.
22+
23+
24+
---
25+
26+
* [HADOOP-18215](https://issues.apache.org/jira/browse/HADOOP-18215) | *Minor* | **Enhance WritableName to be able to return aliases for classes that use serializers**
27+
28+
If you have a SequenceFile with an old key or value class which has been renamed, you can use WritableName.addName to add an alias class. This functionality previously existed, but only worked for classes which extend Writable. It now works for any class, notably key or value classes which use io.serializations.
29+
30+
31+

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/webapps/router/federationhealth.html

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -123,9 +123,10 @@
123123
<th>DataNodes usages% (Min/Median/Max/stdDev)</th>
124124
<td>{#NodeUsage.nodeUsage}{min} / {median} / {max} / {stdDev}{/NodeUsage.nodeUsage}</td>
125125
</tr>
126-
<tr><th><a href="#tab-datanode">Live Nodes</a></th><td>{NumLiveNodes} (Decommissioned: {NumDecomLiveNodes})</td></tr>
127-
<tr><th><a href="#tab-datanode">Dead Nodes</a></th><td>{NumDeadNodes} (Decommissioned: {NumDecomDeadNodes})</td></tr>
126+
<tr><th><a href="#tab-datanode">Live Nodes</a></th><td>{NumLiveNodes} (Decommissioned: {NumDecomLiveNodes}, In Maintenance: {NumInMaintenanceLiveDataNodes})</td></tr>
127+
<tr><th><a href="#tab-datanode">Dead Nodes</a></th><td>{NumDeadNodes} (Decommissioned: {NumDecomDeadNodes}, In Maintenance: {NumInMaintenanceDeadDataNodes})</td></tr>
128128
<tr><th><a href="#tab-datanode">Decommissioning Nodes</a></th><td>{NumDecommissioningNodes}</td></tr>
129+
<tr><th><a href="#tab-datanode">Entering Maintenance Nodes</a></th><td> {NumEnteringMaintenanceDataNodes}</td></tr>
129130
<tr><th title="Excludes missing blocks.">Number of Under-Replicated Blocks</th><td>{NumOfBlocksUnderReplicated}</td></tr>
130131
<tr><th>Number of Blocks Pending Deletion</th><td>{NumOfBlocksPendingDeletion}</td></tr>
131132
</table>

0 commit comments

Comments (0)