@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hive.execution
 
 import scala.collection.JavaConversions._
-import scala.collection.mutable
 
 import org.apache.hadoop.hive.common.`type`.{HiveDecimal, HiveVarchar}
 import org.apache.hadoop.hive.conf.HiveConf
@@ -31,14 +30,12 @@ import org.apache.hadoop.hive.serde2.Serializer
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption
 import org.apache.hadoop.hive.serde2.objectinspector._
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.{JavaHiveDecimalObjectInspector, JavaHiveVarcharObjectInspector}
-import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapred.{FileOutputCommitter, FileOutputFormat, JobConf}
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.expressions.Row
-import org.apache.spark.sql.execution.{SparkPlan, UnaryNode}
+import org.apache.spark.sql.execution.{Command, SparkPlan, UnaryNode}
 import org.apache.spark.sql.hive._
 import org.apache.spark.{SerializableWritable, SparkException, TaskContext}
 
@@ -52,7 +49,7 @@ case class InsertIntoHiveTable(
     child: SparkPlan,
     overwrite: Boolean)
     (@transient sc: HiveContext)
-  extends UnaryNode {
+  extends UnaryNode with Command {
 
   @transient lazy val outputClass = newSerializer(table.tableDesc).getSerializedClass
   @transient private lazy val hiveContext = new Context(sc.hiveconf)
@@ -172,16 +169,14 @@ case class InsertIntoHiveTable(
     }
   }
 
-  override def execute() = result
-
   /**
    * Inserts all the rows in the table into Hive. Row objects are properly serialized with the
    * `org.apache.hadoop.hive.serde2.SerDe` and the
    * `org.apache.hadoop.mapred.OutputFormat` provided by the table definition.
    *
    * Note: this is run once and then kept to avoid double insertions.
    */
-  private lazy val result: RDD[Row] = {
+  override protected[sql] lazy val sideEffectResult: Seq[Row] = {
     // Have to pass the TableDesc object to RDD.mapPartitions and then instantiate new serializer
     // instances within the closure, since Serializer is not serializable while TableDesc is.
     val tableDesc = table.tableDesc
@@ -293,6 +288,6 @@ case class InsertIntoHiveTable(
     // however for now we return an empty list to simplify compatibility checks with hive, which
     // does not return anything for insert operations.
     // TODO: implement hive compatibility as rules.
-    sc.sparkContext.makeRDD(Nil, 1)
+    Seq.empty[Row]
   }
 }
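For context, the pattern this diff adopts is a command-style plan node whose side-effecting work lives in a lazily evaluated `sideEffectResult`, so the insertion runs at most once even if the node's result is requested repeatedly, and an empty row sequence is returned to match Hive's behaviour for INSERT. A minimal, self-contained sketch of that idea (the `Row`, `Command`, and `InsertCommand` names below are simplified stand-ins for illustration, not the actual Spark SQL classes):

```scala
// Simplified stand-ins; the real Spark SQL Command trait and plan nodes differ.
case class Row(values: Any*)

trait Command {
  // Evaluated on first access and cached afterwards -- this is what
  // "run once and then kept to avoid double insertions" relies on.
  protected lazy val sideEffectResult: Seq[Row] = Seq.empty[Row]

  // Every execution path hands back the cached result instead of redoing the work.
  def executeCollect(): Seq[Row] = sideEffectResult
}

class InsertCommand(rows: Seq[Row]) extends Command {
  override protected lazy val sideEffectResult: Seq[Row] = {
    println(s"inserting ${rows.size} rows")  // the side effect happens here, once
    // Hive returns nothing for insert operations, so an empty result keeps
    // compatibility checks simple.
    Seq.empty[Row]
  }
}

object Demo extends App {
  val cmd = new InsertCommand(Seq(Row(1, "a"), Row(2, "b")))
  cmd.executeCollect()  // performs the insert
  cmd.executeCollect()  // returns the cached empty result; no second insert
}
```

Under this sketch, dropping the old `override def execute() = result` and the `RDD[Row]`-typed `result` mirrors the diff: the side effect is no longer tied to materializing an RDD, and callers get a plain `Seq[Row]`.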