
Commit fae38f4 (parent: fdca896)

Fix style

3 files changed: 7 additions, 3 deletions


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
(1 addition, 1 deletion)

@@ -484,4 +484,4 @@ class JoinedRow5 extends Row {
     val row = (if (row1 != null) row1 else Seq[Any]()) ++ (if (row2 != null) row2 else Seq[Any]())
     s"[${row.mkString(",")}]"
   }
-}
+}

(The removed and added lines render identically, so this is a whitespace-only fix on the closing brace, e.g. trailing whitespace or a missing end-of-file newline.)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificRow.scala
(1 addition, 1 deletion)

@@ -298,4 +298,4 @@ class SpecificMutableRow(val values: Array[MutableValue]) extends MutableRow {
   override def getByte(i: Int): Byte = {
     values(i).asInstanceOf[MutableByte].value
   }
-}
+}

(Same whitespace-only fix on the closing brace as above.)

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
(5 additions, 1 deletion)

@@ -139,7 +139,11 @@ private[sql] class OpenHashSetSerializer extends Serializer[OpenHashSet[_]] {
     val set = new OpenHashSet[Any](numItems + 1)
     var i = 0
     while (i < numItems) {
-      val row = new GenericRow(rowSerializer.read(kryo, input, classOf[Array[Any]].asInstanceOf[Class[Any]]).asInstanceOf[Array[Any]])
+      val row =
+        new GenericRow(rowSerializer.read(
+          kryo,
+          input,
+          classOf[Array[Any]].asInstanceOf[Class[Any]]).asInstanceOf[Array[Any]])
       set.add(row)
       i += 1
     }
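This last change only re-wraps one over-long line to satisfy the style checker; behavior is unchanged. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch (not the Spark source) of the same Kryo deserialization loop, assuming rowSerializer is a nested Kryo serializer registered to handle Array[Any] payloads:

    // Illustrative sketch only: reads back `numItems` rows that were
    // written as Array[Any] payloads through a nested Kryo serializer,
    // mirroring the loop in the diff above.
    import com.esotericsoftware.kryo.{Kryo, Serializer}
    import com.esotericsoftware.kryo.io.Input

    def readRows(
        kryo: Kryo,
        input: Input,
        rowSerializer: Serializer[Any], // assumed to handle Array[Any]
        numItems: Int): Array[Array[Any]] = {
      val rows = new Array[Array[Any]](numItems)
      var i = 0
      while (i < numItems) {
        // Serializer.read(kryo, input, type) is the standard Kryo hook;
        // the Class cast mirrors the diff and sidesteps Class[_] invariance.
        rows(i) = rowSerializer
          .read(kryo, input, classOf[Array[Any]].asInstanceOf[Class[Any]])
          .asInstanceOf[Array[Any]]
        i += 1
      }
      rows
    }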
