Skip to content

Commit 1c7a662

Browse files
ulysses-you authored and dbatomic committed
[SPARK-46227][SQL] Move withSQLConf from SQLHelper to SQLConfHelper
### What changes were proposed in this pull request?

This PR moves method `withSQLConf` from `SQLHelper` in the catalyst test module to the `SQLConfHelper` trait in the catalyst module. To make it easy to use in cases such as `val x = withSQLConf {}`, this PR also changes its return type.

### Why are the changes needed?

A part of apache#44013

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass CI

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#44142 from ulysses-you/withSQLConf.

Authored-by: ulysses-you <ulyssesyou18@gmail.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
1 parent 514e6b0 commit 1c7a662

File tree

5 files changed

+34
-33
lines changed

5 files changed

+34
-33
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
package org.apache.spark.sql.catalyst
1919

20+
import org.apache.spark.sql.AnalysisException
2021
import org.apache.spark.sql.internal.SQLConf
2122

2223
/**
@@ -29,4 +30,32 @@ trait SQLConfHelper {
2930
* See [[SQLConf.get]] for more information.
3031
*/
3132
def conf: SQLConf = SQLConf.get
33+
34+
/**
35+
* Sets all SQL configurations specified in `pairs`, calls `f`, and then restores all SQL
36+
* configurations.
37+
*/
38+
protected def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
39+
val conf = SQLConf.get
40+
val (keys, values) = pairs.unzip
41+
val currentValues = keys.map { key =>
42+
if (conf.contains(key)) {
43+
Some(conf.getConfString(key))
44+
} else {
45+
None
46+
}
47+
}
48+
keys.lazyZip(values).foreach { (k, v) =>
49+
if (SQLConf.isStaticConfigKey(k)) {
50+
throw new AnalysisException(s"Cannot modify the value of a static config: $k")
51+
}
52+
conf.setConfString(k, v)
53+
}
54+
try f finally {
55+
keys.zip(currentValues).foreach {
56+
case (key, Some(value)) => conf.setConfString(key, value)
57+
case (key, None) => conf.unsetConf(key)
58+
}
59+
}
60+
}
3261
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala

Lines changed: 2 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -23,41 +23,13 @@ import scala.util.control.NonFatal
2323

2424
import org.scalatest.Assertions.fail
2525

26-
import org.apache.spark.sql.AnalysisException
26+
import org.apache.spark.sql.catalyst.SQLConfHelper
2727
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
2828
import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId
2929
import org.apache.spark.sql.internal.SQLConf
3030
import org.apache.spark.util.Utils
3131

32-
trait SQLHelper {
33-
34-
/**
35-
* Sets all SQL configurations specified in `pairs`, calls `f`, and then restores all SQL
36-
* configurations.
37-
*/
38-
protected def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
39-
val conf = SQLConf.get
40-
val (keys, values) = pairs.unzip
41-
val currentValues = keys.map { key =>
42-
if (conf.contains(key)) {
43-
Some(conf.getConfString(key))
44-
} else {
45-
None
46-
}
47-
}
48-
keys.lazyZip(values).foreach { (k, v) =>
49-
if (SQLConf.isStaticConfigKey(k)) {
50-
throw new AnalysisException(s"Cannot modify the value of a static config: $k")
51-
}
52-
conf.setConfString(k, v)
53-
}
54-
try f finally {
55-
keys.zip(currentValues).foreach {
56-
case (key, Some(value)) => conf.setConfString(key, value)
57-
case (key, None) => conf.unsetConf(key)
58-
}
59-
}
60-
}
32+
trait SQLHelper extends SQLConfHelper {
6133

6234
/**
6335
* Generates a temporary path without creating the actual file/directory, then pass it to `f`. If

sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ class ExecutorSideSQLConfSuite extends SparkFunSuite with SQLTestUtils {
5858
}
5959
}
6060

61-
override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
61+
override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
6262
pairs.foreach { case (k, v) =>
6363
SQLConf.get.setConfString(k, v)
6464
}

sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -243,7 +243,7 @@ private[sql] trait SQLTestUtilsBase
243243
protected override def _sqlContext: SQLContext = self.spark.sqlContext
244244
}
245245

246-
protected override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
246+
protected override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
247247
SparkSession.setActiveSession(spark)
248248
super.withSQLConf(pairs: _*)(f)
249249
}

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ class HiveSerDeSuite extends HiveComparisonTest with PlanTest with BeforeAndAfte
8484
}
8585

8686
// Make sure we set the config values to TestHive.conf.
87-
override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit =
87+
override def withSQLConf[T](pairs: (String, String)*)(f: => T): T =
8888
SQLConf.withExistingConf(TestHive.conf)(super.withSQLConf(pairs: _*)(f))
8989

9090
test("Test the default fileformat for Hive-serde tables") {

0 commit comments

Comments (0)