Skip to content

Commit 33e0fe8

Browse files
committed
Merge branch 'ESPARK-135' into 'spark_2.1'
[HOTFIX] 修复打包编译时候的语法检查错误 修复打包编译时候的语法检查错误 See merge request !83
2 parents 5abc232 + 60d8bed commit 33e0fe8

File tree

3 files changed

+12
-12
lines changed

3 files changed

+12
-12
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/ObjectAggregationIterator.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -75,8 +75,8 @@ class ObjectAggregationIterator(
7575
FromUnsafeProjection(outputAttributes.map(_.dataType))
7676

7777
/**
78-
* Start processing input rows.
79-
*/
78+
* Start processing input rows.
79+
*/
8080
processInputs()
8181

8282
override final def hasNext: Boolean = {
@@ -89,8 +89,8 @@ class ObjectAggregationIterator(
8989
}
9090

9191
/**
92-
* Generate an output row when there is no input and there is no grouping expression.
93-
*/
92+
* Generate an output row when there is no input and there is no grouping expression.
93+
*/
9494
def outputForEmptyGroupingKeyWithoutInput(): UnsafeRow = {
9595
if (groupingExpressions.isEmpty) {
9696
val defaultAggregationBuffer = createNewAggregationBuffer()
@@ -233,9 +233,9 @@ class SortBasedAggregator(
233233
}
234234

235235
/**
236-
* Returns a destructive iterator of AggregationBufferEntry.
237-
* Notice: it is illegal to call any method after `destructiveIterator()` has been called.
238-
*/
236+
* Returns a destructive iterator of AggregationBufferEntry.
237+
* Notice: it is illegal to call any method after `destructiveIterator()` has been called.
238+
*/
239239
def destructiveIterator(): Iterator[AggregationBufferEntry] = {
240240
new Iterator[AggregationBufferEntry] {
241241
val inputIterator = inputSorter.sortedIterator()

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/ObjectAggregationMap.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -60,9 +60,9 @@ class ObjectAggregationMap() {
6060
}
6161

6262
/**
63-
* Dumps all entries into a newly created external sorter, clears the hash map, and returns the
64-
* external sorter.
65-
*/
63+
* Dumps all entries into a newly created external sorter, clears the hash map, and returns the
64+
* external sorter.
65+
*/
6666
def dumpToExternalSorter(
6767
groupingAttributes: Seq[Attribute],
6868
aggregateFunctions: Seq[AggregateFunction]): UnsafeKVExternalSorter = {

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
1919

2020
import java.security.PrivilegedExceptionAction
2121
import java.sql.{Date, Timestamp}
22-
import java.util.{Arrays, UUID, Map => JMap}
22+
import java.util.{Arrays, Map => JMap, UUID}
2323
import java.util.concurrent.RejectedExecutionException
2424

2525
import scala.collection.JavaConverters._
@@ -37,7 +37,7 @@ import org.apache.hive.service.cli.session.HiveSession
3737
import org.apache.spark.SparkContext
3838
import org.apache.spark.deploy.SparkHadoopUtil
3939
import org.apache.spark.internal.Logging
40-
import org.apache.spark.sql.{DataFrame, SQLContext, Row => SparkRow}
40+
import org.apache.spark.sql.{DataFrame, Row => SparkRow, SQLContext}
4141
import org.apache.spark.sql.execution.command.SetCommand
4242
import org.apache.spark.sql.hive.HiveUtils
4343
import org.apache.spark.sql.internal.SQLConf

0 commit comments

Comments (0)