
Commit 201e08c

beliefer authored and HyukjinKwon committed
[SPARK-42680][CONNECT][TESTS] Create the helper function withSQLConf for connect test framework
### What changes were proposed in this pull request?
Spark SQL has the helper function `withSQLConf`, which makes it easy to change a SQL config for the scope of a test. This PR adds the same helper to the Connect test framework.

### Why are the changes needed?
To make Connect test cases easier to implement.

### Does this PR introduce _any_ user-facing change?
No, it is a test-only change.

### How was this patch tested?
Test case updated.

Closes #40296 from beliefer/SPARK-42680.

Authored-by: Jiaan Geng <beliefer@163.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
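
For illustration, here is a minimal sketch (not part of the commit) of how a Connect test can use the new helper; the suite name below is hypothetical, while `SQLHelper`, `withSQLConf`, and `RemoteSparkSession` are taken from the diff that follows:

```scala
package org.apache.spark.sql

import org.apache.spark.sql.connect.client.util.RemoteSparkSession

// Hypothetical suite mixing in the SQLHelper trait added by this commit.
class ExampleConfSuite extends RemoteSparkSession with SQLHelper {

  test("runs with a temporary SQL conf override") {
    // "-1" disables broadcast joins only inside this block; withSQLConf
    // restores the previous value (or unsets the key) afterwards.
    withSQLConf("spark.sql.autoBroadcastJoinThreshold" -> "-1") {
      assert(spark.range(10).count() == 10)
    }
  }
}
```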
1 parent dfdc4a1 commit 201e08c

File tree

2 files changed: +54 -5 lines changed

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala

Lines changed: 2 additions & 5 deletions
@@ -31,7 +31,7 @@ import org.apache.spark.sql.connect.client.util.{IntegrationTestUtils, RemoteSparkSession}
 import org.apache.spark.sql.functions.{aggregate, array, broadcast, col, count, lit, rand, sequence, shuffle, struct, transform, udf}
 import org.apache.spark.sql.types._

-class ClientE2ETestSuite extends RemoteSparkSession {
+class ClientE2ETestSuite extends RemoteSparkSession with SQLHelper {

   // Spark Result
   test("spark result schema") {
@@ -501,16 +501,13 @@ class ClientE2ETestSuite extends RemoteSparkSession {
   }

   test("broadcast join") {
-    spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "-1")
-    try {
+    withSQLConf("spark.sql.autoBroadcastJoinThreshold" -> "-1") {
       val left = spark.range(100).select(col("id"), rand(10).as("a"))
       val right = spark.range(100).select(col("id"), rand(12).as("a"))
       val joined =
         left.join(broadcast(right), left("id") === right("id")).select(left("id"), right("a"))
       assert(joined.schema.catalogString === "struct<id:bigint,a:double>")
       testCapturedStdOut(joined.explain(), "BroadcastHashJoin")
-    } finally {
-      spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "10MB")
     }
   }


connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/SQLHelper.scala

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql
+
+trait SQLHelper {
+
+  def spark: SparkSession
+
+  /**
+   * Sets all SQL configurations specified in `pairs`, calls `f`, and then restores all SQL
+   * configurations.
+   */
+  protected def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
+    val (keys, values) = pairs.unzip
+    val currentValues = keys.map { key =>
+      if (spark.conf.getOption(key).isDefined) {
+        Some(spark.conf.get(key))
+      } else {
+        None
+      }
+    }
+    (keys, values).zipped.foreach { (k, v) =>
+      if (spark.conf.isModifiable(k)) {
+        spark.conf.set(k, v)
+      } else {
+        throw new AnalysisException(s"Cannot modify the value of a static config: $k")
+      }
+
+    }
+    try f
+    finally {
+      keys.zip(currentValues).foreach {
+        case (key, Some(value)) => spark.conf.set(key, value)
+        case (key, None) => spark.conf.unset(key)
+      }
+    }
+  }
+}
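
A hedged usage sketch of the trait above, showing its save/restore behavior: keys that were previously set are reset to their old values, and keys that were previously unset are unset again once the block exits. The suite name is hypothetical; everything else comes from this commit and from `ClientE2ETestSuite`:

```scala
package org.apache.spark.sql

import org.apache.spark.sql.connect.client.util.RemoteSparkSession

// Hypothetical suite illustrating the restore semantics of withSQLConf.
class SQLHelperRestoreSuite extends RemoteSparkSession with SQLHelper {

  test("previous conf values are restored after the block") {
    val key = "spark.sql.autoBroadcastJoinThreshold"
    val before = spark.conf.getOption(key)        // remember the original value, if any
    withSQLConf(key -> "-1") {
      assert(spark.conf.get(key) == "-1")         // the override is visible inside the block
    }
    assert(spark.conf.getOption(key) == before)   // original value (or absence) is restored
  }
}
```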
