sql/core/src/main/scala/org/apache/spark/sql/execution
1 file changed: 11 additions, 2 deletions

@@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicBoolean
 
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.{Await, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
+import scala.concurrent.duration._
 
 import org.apache.spark.Logging
 import org.apache.spark.rdd.{RDD, RDDOperationScope}
@@ -139,10 +139,19 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
 
       children.foreach(_.prepare())
 
+      val timeout: Duration = {
+        val timeoutValue = sqlContext.conf.broadcastTimeout
+        if (timeoutValue < 0) {
+          Duration.Inf
+        } else {
+          timeoutValue.seconds
+        }
+      }
+
       // fill in the result of subqueries
       queryResults.foreach {
         case (e, futureResult) =>
-          val rows = Await.result(futureResult, Duration.Inf)
+          val rows = Await.result(futureResult, timeout)
           if (rows.length > 1) {
             sys.error(s"Scalar subquery should return at most one row, but got ${rows.length}: " +
               s"${e.query.treeString}")