Commit f3916e7

Remove unnecessary checks and add comments
1 parent 6fe1dd0 commit f3916e7

File tree

1 file changed: +1 -7 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala

Lines changed: 1 addition & 7 deletions
@@ -363,8 +363,6 @@ private[joins] object UnsafeHashedRelation {
 private[execution] final class LongToUnsafeRowMap(val mm: TaskMemoryManager, capacity: Int)
   extends MemoryConsumer(mm) with Externalizable with KryoSerializable {
 
-  private val ARRAY_MAX = ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH
-
   // Whether the keys are stored in dense mode or not.
   private var isDense = false
 
@@ -619,18 +617,14 @@ private[execution] final class LongToUnsafeRowMap(val mm: TaskMemoryManager, cap
   }
 
   private def grow(inputRowSize: Int): Unit = {
+    // There is 8 bytes for the pointer to next value
     val neededNumWords = (cursor - Platform.LONG_ARRAY_OFFSET + 8 + inputRowSize + 7) / 8
     if (neededNumWords > page.length) {
       if (neededNumWords > (1 << 30)) {
         throw new UnsupportedOperationException(
           "Can not build a HashedRelation that is larger than 8G")
       }
       val newNumWords = math.max(neededNumWords, math.min(page.length * 2, 1 << 30))
-      if (newNumWords > ARRAY_MAX) {
-        throw new UnsupportedOperationException(
-          "Cannot grow internal buffer by size " + newNumWords +
-            " because the size after growing " + "exceeds size limitation " + ARRAY_MAX)
-      }
       ensureAcquireMemory(newNumWords * 8L)
       val newPage = new Array[Long](newNumWords.toInt)
       Platform.copyMemory(page, Platform.LONG_ARRAY_OFFSET, newPage, Platform.LONG_ARRAY_OFFSET,
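
Why the deleted branch was unnecessary: grow() already rejects any neededNumWords above 1 << 30, so newNumWords can never exceed 2^30 long words (8 GB), which is below the array-length limit the removed ARRAY_MAX check guarded against. The following is a minimal, self-contained Scala sketch of that arithmetic; the object and value names are illustrative and not part of the patch, and the value of ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH is assumed here to be Integer.MAX_VALUE - 15.

  // Sketch only: illustrates why the removed ARRAY_MAX check was unreachable.
  object GrowLimitSketch {
    // Cap already enforced by grow(): at most 2^30 long words per page.
    val maxWords: Long = 1L << 30
    // Assumed value of ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH.
    val arrayMax: Long = Int.MaxValue - 15L

    def main(args: Array[String]): Unit = {
      // 2^30 words * 8 bytes/word = 8 GB, the limit named in the error message.
      println(s"max page bytes = ${maxWords * 8L}")            // 8589934592
      // 1073741824 < 2147483632, so newNumWords > ARRAY_MAX could never hold.
      println(s"maxWords < arrayMax? ${maxWords < arrayMax}")  // true
    }
  }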
