Skip to content

Commit

Permalink
[SPARK-37792][CORE] Fix the check of custom configuration in SparkShellLoggingFilter
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?

This patch fixes the way we check if the log came from root or some custom configuration in `SparkShellLoggingFilter`.

### Why are the changes needed?

The way we previously checked (under log4j 1.x) whether a log event came from the root logger or from some custom configuration in `SparkShellLoggingFilter` no longer works with log4j 2.

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

Manual test

Closes apache#35080 from viirya/SPARK-37792.

Authored-by: Liang-Chi Hsieh <viirya@gmail.com>
Signed-off-by: Kousuke Saruta <sarutak@oss.nttdata.com>
  • Loading branch information
viirya authored and sarutak committed Jan 5, 2022
1 parent 98e1c77 commit 3b88bc8
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 19 deletions.
19 changes: 13 additions & 6 deletions core/src/main/scala/org/apache/spark/internal/Logging.scala
Original file line number Diff line number Diff line change
Expand Up @@ -259,18 +259,25 @@ private[spark] object Logging {
} else if (logEvent.getLevel.isMoreSpecificThan(Logging.sparkShellThresholdLevel)) {
Filter.Result.NEUTRAL
} else {
var logger = LogManager.getLogger(logEvent.getLoggerName)
.asInstanceOf[Log4jLogger]
while (logger.getParent() != null) {
if (logger.getLevel != null || !logger.getAppenders.isEmpty) {
val logger = LogManager.getLogger(logEvent.getLoggerName).asInstanceOf[Log4jLogger]
if (loggerWithCustomConfig(logger)) {
return Filter.Result.NEUTRAL
}
logger = logger.getParent()
}
Filter.Result.DENY
}
}

// Return true if the logger has custom configuration. It depends on:
// 1. If the logger isn't attached with root logger config (i.e., with custom configuration), or
// 2. the logger level is different to root config level (i.e., it is changed programmatically).
//
// Note that if a logger is programmatically changed log level but set to same level
// as root config level, we cannot tell if it is with custom configuration.
private def loggerWithCustomConfig(logger: Log4jLogger): Boolean = {
val rootConfig = LogManager.getRootLogger.asInstanceOf[Log4jLogger].get()
(logger.get() ne rootConfig) || (logger.getLevel != rootConfig.getLevel())
}

override def getState: LifeCycle.State = status

override def initialize(): Unit = {
Expand Down
23 changes: 16 additions & 7 deletions core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -36,26 +36,35 @@ class LoggingSuite extends SparkFunSuite {
val originalThreshold = Logging.sparkShellThresholdLevel
Logging.sparkShellThresholdLevel = Level.WARN
try {
// without custom log level configured
val logger1 = LogManager.getLogger("a.b.c.D")
.asInstanceOf[Logger]
val logEvent1 = new Builder().setLevel(Level.INFO)
.setLoggerName(logger1.getName()).setMessage(new SimpleMessage("Test")).build()
// Logger's default level is not null in log4j2, and cannot be set to null too.
assert(ssf.filter(logEvent1) == Filter.Result.NEUTRAL)
assert(ssf.filter(logEvent1) == Filter.Result.DENY)

// custom log level configured
// custom log level configured (programmingly)
val parentLogger = LogManager.getLogger("a.b.c")
.asInstanceOf[Logger]
parentLogger.setLevel(Level.INFO)
assert(ssf.filter(logEvent1) == Filter.Result.NEUTRAL)
parentLogger.setLevel(Level.DEBUG)
val logEvent2 = new Builder().setLevel(Level.INFO)
.setLoggerName(parentLogger.getName()).setMessage(new SimpleMessage("Test")).build()
assert(ssf.filter(logEvent2) == Filter.Result.NEUTRAL)

// custom log level configured (by log4j2.properties)
val jettyLogger = LogManager.getLogger("org.sparkproject.jetty")
.asInstanceOf[Logger]
val logEvent3 = new Builder().setLevel(Level.INFO)
.setLoggerName(jettyLogger.getName()).setMessage(new SimpleMessage("Test")).build()
assert(ssf.filter(logEvent3) != Filter.Result.DENY)

// log level is greater than or equal to threshold level
val logger2 = LogManager.getLogger("a.b.E")
.asInstanceOf[Logger]
val logEvent2 = new Builder().setLevel(Level.INFO)
val logEvent4 = new Builder().setLevel(Level.INFO)
.setLoggerName(logger2.getName()).setMessage(new SimpleMessage("Test")).build()
Utils.setLogLevel(Level.INFO)
assert(ssf.filter(logEvent2) != Filter.Result.DENY)
assert(ssf.filter(logEvent4) != Filter.Result.DENY)
} finally {
rootLogger.setLevel(originalLevel)
Logging.sparkShellThresholdLevel = originalThreshold
Expand Down
19 changes: 13 additions & 6 deletions repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,9 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
|# Set the log level for this class to ERROR same as the default setting.
|logger.repl.name = org.apache.spark.repl.Main
|logger.repl.level = error
|
|logger.customLogger2.name = customLogger2
|logger.customLogger2.level = info
|""".stripMargin

val log4jprops = Files.createTempFile("log4j2.properties.d", "log4j2.properties")
Expand All @@ -301,11 +304,11 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
val originalReplThresholdLevel = Logging.sparkShellThresholdLevel

val replLoggerLogMessage = "Log level for REPL: "
val debugLogMessage1 = "debugLogMessage1 should not be output"
val warnLogMessage1 = "warnLogMessage1 should be output"
val warnLogMessage1 = "warnLogMessage1 should not be output"
val errorLogMessage1 = "errorLogMessage1 should be output"
val infoLogMessage1 = "infoLogMessage2 should be output"
val infoLogMessage2 = "infoLogMessage3 should be output"
val debugLogMessage1 = "debugLogMessage1 should be output"

val out = try {
val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
Expand Down Expand Up @@ -337,21 +340,25 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
| // so this inherits the settings of rootLogger
| // but ConsoleAppender can use a different log level.
| val customLogger1 = LogManager.getLogger("customLogger1")
| customLogger1.debug("$debugLogMessage1")
| customLogger1.warn("$warnLogMessage1")
| customLogger1.error("$errorLogMessage1")
|
| // customLogger2 is explicitly configured its log level as INFO
| // so info level messages logged via customLogger2 should be output.
| val customLogger2 = LogManager.getLogger("customLogger2").asInstanceOf[Logger]
| customLogger2.setLevel(Level.INFO)
| customLogger2.info("$infoLogMessage1")
|
| // customLogger2 is explicitly configured its log level
| // so its child should inherit the settings.
| val customLogger3 = LogManager.getLogger("customLogger2.child")
| customLogger3.info("$infoLogMessage2")
|
| // customLogger4 is programmingly configured its log level as DEBUG
| // so debug level messages logged via customLogger4 should be output.
| val customLogger4 = LogManager.getLogger("customLogger4").asInstanceOf[Logger]
| customLogger4.setLevel(Level.DEBUG)
| customLogger4.debug("$debugLogMessage1")
|
| // echo log messages
| bout.toString
|} finally {
Expand Down Expand Up @@ -386,10 +393,10 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
// Ensure log level threshold for REPL is ERROR.
assertContains(replLoggerLogMessage + "ERROR", out)

assertDoesNotContain(debugLogMessage1, out)
assertContains(warnLogMessage1, out)
assertDoesNotContain(warnLogMessage1, out)
assertContains(errorLogMessage1, out)
assertContains(infoLogMessage1, out)
assertContains(infoLogMessage2, out)
assertContains(debugLogMessage1, out)
}
}

0 comments on commit 3b88bc8

Please sign in to comment.