Skip to content

Commit 1574a43

Browse files
committed
Use result.collect / result.toLocalIterator directly instead of resultRdd.map(_.copy()) on result.queryExecution.toRdd
1 parent 8b2d845 commit 1574a43

File tree

2 files changed

+4
-6
lines changed
  • sql/hive-thriftserver
    • v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver
    • v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver

2 files changed

+4
-6
lines changed

sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -202,13 +202,12 @@ private[hive] class SparkExecuteStatementOperation(
202202
hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
203203
}
204204
iter = {
205-
val resultRdd = result.queryExecution.toRdd
206205
val useIncrementalCollect =
207206
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean
208207
if (useIncrementalCollect) {
209-
resultRdd.map(_.copy()).toLocalIterator
208+
result.toLocalIterator
210209
} else {
211-
resultRdd.map(_.copy()).collect().iterator
210+
result.collect().iterator
212211
}
213212
}
214213
dataTypes = result.queryExecution.analyzed.output.map(_.dataType).toArray

sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -87,13 +87,12 @@ private[hive] class SparkExecuteStatementOperation(
8787
val groupId = round(random * 1000000).toString
8888
hiveContext.sparkContext.setJobGroup(groupId, statement)
8989
iter = {
90-
val resultRdd = result.queryExecution.toRdd
9190
val useIncrementalCollect =
9291
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean
9392
if (useIncrementalCollect) {
94-
resultRdd.map(_.copy()).toLocalIterator
93+
result.toLocalIterator
9594
} else {
96-
resultRdd.map(_.copy()).collect().iterator
95+
result.collect().iterator
9796
}
9897
}
9998
dataTypes = result.queryExecution.analyzed.output.map(_.dataType).toArray

0 commit comments

Comments (0)