Skip to content

Commit c57cb91

Browse files
LuciferYang authored and asiunov committed
[SPARK-37805][TESTS] Refactor TestUtils#configTestLog4j method to use log4j2 api
### What changes were proposed in this pull request? SPARK-37795 add a scalastyle rule to ban `org.apache.log4j` imports, but there is still one place to retain the imports of `org.apache.log4j` in `o.a.spark.TestUtils`. This pr refactor `configTestLog4j` method in `o.a.spark.TestUtils` to use log4j2 api and let the log behavior using log4j2.x be the same as that using log4j1.x before. In fact, the `configTestLog4j` method behavior before this pr is invalid because `PropertyConfigurator.configure` method in `org.apache.logging.log4j:log4j-1.2-api` is an empty method as follows: https://github.com/apache/logging-log4j2/blob/491a0b3787975b6fc95b6a8cb3da76dc7517c65f/log4j-1.2-api/src/main/java/org/apache/log4j/PropertyConfigurator.java#L39-L47 Another change of this pr is rename the method name from `configTestLog4j` to `configTestLog4j2`. ### Why are the changes needed? Clean up the `org.apache.log4j` imports left in Spark internal and let `configTestLog4j` method behavior keep consistent between log4j1.x and log4j2.x. ### Does this PR introduce _any_ user-facing change? The `configTestLog4j` method in `TestUtils` rename to `configTestLog4j2` ### How was this patch tested? - Pass the Jenkins or GitHub Action - Manual test Run the test cases using `configTestLog4j` method in the following 3 scenarios: 1. without this pr to test log4j2.x 2. with this pr to test log4j2.x 3. run `git reset --hard 1922798` to test log4j1.x For example `WholeStageCodegenSparkSubmitSuite`, run ``` mvn clean install -DskipTests -pl sql/core -am mvn test -pl sql/core -Dtest=none -DwildcardSuites=org.apache.spark.sql.execution.WholeStageCodegenSparkSubmitSuite ``` Scenario 1 does not print any logs to the console, scenario 2 and scenario 3 will print similar logs to the console Closes apache#35095 from LuciferYang/refactor-configTestLog4j. Authored-by: yangjie01 <yangjie01@baidu.com> Signed-off-by: Sean Owen <srowen@gmail.com> (cherry picked from commit f3eedaf)
1 parent c606214 commit c57cb91

File tree

5 files changed

+30
-28
lines changed

5 files changed

+30
-28
lines changed

core/src/main/scala/org/apache/spark/TestUtils.scala

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import java.nio.file.{Files => JavaFiles, Paths}
2424
import java.nio.file.attribute.PosixFilePermission.{OWNER_EXECUTE, OWNER_READ, OWNER_WRITE}
2525
import java.security.SecureRandom
2626
import java.security.cert.X509Certificate
27-
import java.util.{Arrays, EnumSet, Locale, Properties}
27+
import java.util.{Arrays, EnumSet, Locale}
2828
import java.util.concurrent.{TimeoutException, TimeUnit}
2929
import java.util.jar.{JarEntry, JarOutputStream, Manifest}
3030
import java.util.regex.Pattern
@@ -40,9 +40,10 @@ import scala.util.Try
4040

4141
import com.google.common.io.{ByteStreams, Files}
4242
import org.apache.commons.lang3.StringUtils
43-
// scalastyle:off
44-
import org.apache.log4j.PropertyConfigurator
45-
// scalastyle:on
43+
import org.apache.logging.log4j.LogManager
44+
import org.apache.logging.log4j.core.LoggerContext
45+
import org.apache.logging.log4j.core.appender.ConsoleAppender
46+
import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory
4647
import org.eclipse.jetty.server.Handler
4748
import org.eclipse.jetty.server.Server
4849
import org.eclipse.jetty.server.handler.DefaultHandler
@@ -412,17 +413,18 @@ private[spark] object TestUtils {
412413
}
413414

414415
/**
415-
* config a log4j properties used for testsuite
416+
* config a log4j2 properties used for testsuite
416417
*/
417-
def configTestLog4j(level: String): Unit = {
418-
val pro = new Properties()
419-
pro.put("log4j.rootLogger", s"$level, console")
420-
pro.put("log4j.appender.console", "org.apache.log4j.ConsoleAppender")
421-
pro.put("log4j.appender.console.target", "System.err")
422-
pro.put("log4j.appender.console.layout", "org.apache.log4j.PatternLayout")
423-
pro.put("log4j.appender.console.layout.ConversionPattern",
424-
"%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n")
425-
PropertyConfigurator.configure(pro)
418+
def configTestLog4j2(level: String): Unit = {
419+
val builder = ConfigurationBuilderFactory.newConfigurationBuilder()
420+
val appenderBuilder = builder.newAppender("console", "CONSOLE")
421+
.addAttribute("target", ConsoleAppender.Target.SYSTEM_ERR)
422+
appenderBuilder.add(builder.newLayout("PatternLayout")
423+
.addAttribute("pattern", "%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n"))
424+
builder.add(appenderBuilder)
425+
builder.add(builder.newRootLogger(level).add(builder.newAppenderRef("console")))
426+
val configuration = builder.build()
427+
LogManager.getContext(false).asInstanceOf[LoggerContext].reconfigure(configuration)
426428
}
427429

428430
/**

core/src/test/scala/org/apache/spark/DriverSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ class DriverSuite extends SparkFunSuite with TimeLimits {
5151
*/
5252
object DriverWithoutCleanup {
5353
def main(args: Array[String]): Unit = {
54-
TestUtils.configTestLog4j("INFO")
54+
TestUtils.configTestLog4j2("INFO")
5555
val conf = new SparkConf
5656
val sc = new SparkContext(args(0), "DriverWithoutCleanup", conf)
5757
sc.parallelize(1 to 100, 4).count()

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1520,7 +1520,7 @@ class SparkSubmitSuite
15201520

15211521
object JarCreationTest extends Logging {
15221522
def main(args: Array[String]): Unit = {
1523-
TestUtils.configTestLog4j("INFO")
1523+
TestUtils.configTestLog4j2("INFO")
15241524
val conf = new SparkConf()
15251525
val sc = new SparkContext(conf)
15261526
val result = sc.makeRDD(1 to 100, 10).mapPartitions { x =>
@@ -1544,7 +1544,7 @@ object JarCreationTest extends Logging {
15441544

15451545
object SimpleApplicationTest {
15461546
def main(args: Array[String]): Unit = {
1547-
TestUtils.configTestLog4j("INFO")
1547+
TestUtils.configTestLog4j2("INFO")
15481548
val conf = new SparkConf()
15491549
val sc = new SparkContext(conf)
15501550
val configs = Seq("spark.master", "spark.app.name")

sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ object WholeStageCodegenSparkSubmitSuite extends Assertions with Logging {
6060
var spark: SparkSession = _
6161

6262
def main(args: Array[String]): Unit = {
63-
TestUtils.configTestLog4j("INFO")
63+
TestUtils.configTestLog4j2("INFO")
6464

6565
spark = SparkSession.builder().getOrCreate()
6666

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -371,7 +371,7 @@ class HiveSparkSubmitSuite
371371

372372
object SetMetastoreURLTest extends Logging {
373373
def main(args: Array[String]): Unit = {
374-
TestUtils.configTestLog4j("INFO")
374+
TestUtils.configTestLog4j2("INFO")
375375

376376
val sparkConf = new SparkConf(loadDefaults = true)
377377
val builder = SparkSession.builder()
@@ -409,7 +409,7 @@ object SetMetastoreURLTest extends Logging {
409409

410410
object SetWarehouseLocationTest extends Logging {
411411
def main(args: Array[String]): Unit = {
412-
TestUtils.configTestLog4j("INFO")
412+
TestUtils.configTestLog4j2("INFO")
413413

414414
val sparkConf = new SparkConf(loadDefaults = true).set(UI_ENABLED, false)
415415
val providedExpectedWarehouseLocation =
@@ -489,7 +489,7 @@ object SetWarehouseLocationTest extends Logging {
489489
// can load the jar defined with the function.
490490
object TemporaryHiveUDFTest extends Logging {
491491
def main(args: Array[String]): Unit = {
492-
TestUtils.configTestLog4j("INFO")
492+
TestUtils.configTestLog4j2("INFO")
493493
val conf = new SparkConf()
494494
conf.set(UI_ENABLED, false)
495495
val sc = new SparkContext(conf)
@@ -527,7 +527,7 @@ object TemporaryHiveUDFTest extends Logging {
527527
// can load the jar defined with the function.
528528
object PermanentHiveUDFTest1 extends Logging {
529529
def main(args: Array[String]): Unit = {
530-
TestUtils.configTestLog4j("INFO")
530+
TestUtils.configTestLog4j2("INFO")
531531
val conf = new SparkConf()
532532
conf.set(UI_ENABLED, false)
533533
val sc = new SparkContext(conf)
@@ -565,7 +565,7 @@ object PermanentHiveUDFTest1 extends Logging {
565565
// can load the jar defined with the function.
566566
object PermanentHiveUDFTest2 extends Logging {
567567
def main(args: Array[String]): Unit = {
568-
TestUtils.configTestLog4j("INFO")
568+
TestUtils.configTestLog4j2("INFO")
569569
val conf = new SparkConf()
570570
conf.set(UI_ENABLED, false)
571571
val sc = new SparkContext(conf)
@@ -600,7 +600,7 @@ object PermanentHiveUDFTest2 extends Logging {
600600
// We test if we can load user jars in both driver and executors when HiveContext is used.
601601
object SparkSubmitClassLoaderTest extends Logging {
602602
def main(args: Array[String]): Unit = {
603-
TestUtils.configTestLog4j("INFO")
603+
TestUtils.configTestLog4j2("INFO")
604604
val conf = new SparkConf()
605605
val hiveWarehouseLocation = Utils.createTempDir()
606606
conf.set(UI_ENABLED, false)
@@ -670,7 +670,7 @@ object SparkSubmitClassLoaderTest extends Logging {
670670
// We test if we can correctly set spark sql configurations when HiveContext is used.
671671
object SparkSQLConfTest extends Logging {
672672
def main(args: Array[String]): Unit = {
673-
TestUtils.configTestLog4j("INFO")
673+
TestUtils.configTestLog4j2("INFO")
674674
// We override the SparkConf to add spark.sql.hive.metastore.version and
675675
// spark.sql.hive.metastore.jars to the beginning of the conf entry array.
676676
// So, if metadataHive get initialized after we set spark.sql.hive.metastore.version but
@@ -711,7 +711,7 @@ object SPARK_9757 extends QueryTest {
711711
protected var spark: SparkSession = _
712712

713713
def main(args: Array[String]): Unit = {
714-
TestUtils.configTestLog4j("INFO")
714+
TestUtils.configTestLog4j2("INFO")
715715

716716
val hiveWarehouseLocation = Utils.createTempDir()
717717
val sparkContext = new SparkContext(
@@ -760,7 +760,7 @@ object SPARK_11009 extends QueryTest {
760760
protected var spark: SparkSession = _
761761

762762
def main(args: Array[String]): Unit = {
763-
TestUtils.configTestLog4j("INFO")
763+
TestUtils.configTestLog4j2("INFO")
764764

765765
val sparkContext = new SparkContext(
766766
new SparkConf()
@@ -791,7 +791,7 @@ object SPARK_14244 extends QueryTest {
791791
protected var spark: SparkSession = _
792792

793793
def main(args: Array[String]): Unit = {
794-
TestUtils.configTestLog4j("INFO")
794+
TestUtils.configTestLog4j2("INFO")
795795

796796
val sparkContext = new SparkContext(
797797
new SparkConf()

0 commit comments

Comments
 (0)