Commit 22a69d1

[SPARK-26564] Fix wrong assertions and error messages for parameter checking
Additional fix to respect the existing error checking and revise the messages
Parent: cc56479

2 files changed, 2 insertions(+), 2 deletions(-)


sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/BroadcastExchangeExec.scala

Lines changed: 1 addition & 1 deletion
@@ -79,7 +79,7 @@ case class BroadcastExchangeExec(
           val (numRows, input) = child.executeCollectIterator()
           if (numRows >= 512000000) {
             throw new SparkException(
-              s"Cannot broadcast the table with more than 512 millions rows: $numRows rows")
+              s"Cannot broadcast the table with 512 million or more rows: $numRows rows")
           }

           val beforeBuild = System.nanoTime()
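Only the message changes here: the guard already fires at exactly 512,000,000 rows, so "512 million or more" matches the >= comparison, where the old "more than 512 millions rows" did not. A minimal REPL-style sketch of the boundary behavior (not the Spark source; SparkException is replaced with a plain exception and the limit is given the hypothetical name maxBroadcastRows):

    val maxBroadcastRows = 512000000L  // hypothetical constant for the hard-coded limit

    def assertBroadcastable(numRows: Long): Unit = {
      if (numRows >= maxBroadcastRows) {
        throw new IllegalStateException(
          s"Cannot broadcast the table with 512 million or more rows: $numRows rows")
      }
    }

    assertBroadcastable(511999999L)    // passes: strictly below the limit
    // assertBroadcastable(512000000L) // throws: the boundary value itself is rejected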

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala

Lines changed: 1 addition & 1 deletion
@@ -413,7 +413,7 @@ private[execution] final class LongToUnsafeRowMap(val mm: TaskMemoryManager, cap

   private def init(): Unit = {
     if (mm != null) {
-      require(capacity <= 512000000, "Cannot broadcast more than 512 millions rows")
+      require(capacity < 512000000, "Cannot broadcast 512 million or more rows")
       var n = 1
       while (n < capacity) n *= 2
       ensureAcquireMemory(n * 2L * 8 + (1 << 20))
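This change tightens the comparison from <= to <, so the assertion no longer accepts a capacity of exactly 512,000,000 that BroadcastExchangeExec already rejects, and the message is reworded to match. A REPL-style sketch of just the precondition (an assumed simplification, not the Spark source; the real init() goes on to round the capacity up to a power of two and acquire memory for the map):

    def checkCapacity(capacity: Int): Unit =
      require(capacity < 512000000, "Cannot broadcast 512 million or more rows")

    checkCapacity(511999999)    // fine under both the old and the new check
    // checkCapacity(512000000) // used to pass the <= check; now fails, matching the >= check upstream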
