@@ -191,13 +191,13 @@ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
     sql("DROP TABLE hiveTableWithStructValue")
   }
 
-  test("SPARK-5498:partition schema does not match table schema"){
+  test("SPARK-5498:partition schema does not match table schema") {
     val testData = TestHive.sparkContext.parallelize(
-      (1 to 10).map(i => TestData(i, i.toString)))
+      (1 to 10).map(i => TestData(i, i.toString))).toDF()
     testData.registerTempTable("testData")
 
     val testDatawithNull = TestHive.sparkContext.parallelize(
-      (1 to 10).map(i => ThreeCloumntable(i, i.toString,null)))
+      (1 to 10).map(i => ThreeCloumntable(i, i.toString,null))).toDF()
 
     val tmpDir = Files.createTempDir()
     sql(s"CREATE TABLE table_with_partition(key int,value string) PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
@@ -206,25 +206,25 @@ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
     // test schema the same between partition and table
     sql("ALTER TABLE table_with_partition CHANGE COLUMN key key BIGINT")
     checkAnswer(sql("select key,value from table_with_partition where ds='1' "),
-      testData.toDataFrame.collect.toSeq
+      testData.collect.toSeq
     )
 
     // test difference type of field
     sql("ALTER TABLE table_with_partition CHANGE COLUMN key key BIGINT")
     checkAnswer(sql("select key,value from table_with_partition where ds='1' "),
-      testData.toDataFrame.collect.toSeq
+      testData.collect.toSeq
     )
 
     // add column to table
     sql("ALTER TABLE table_with_partition ADD COLUMNS(key1 string)")
     checkAnswer(sql("select key,value,key1 from table_with_partition where ds='1' "),
-      testDatawithNull.toDataFrame.collect.toSeq
+      testDatawithNull.collect.toSeq
     )
 
     // change column name to table
     sql("ALTER TABLE table_with_partition CHANGE COLUMN key keynew BIGINT")
     checkAnswer(sql("select keynew,value from table_with_partition where ds='1' "),
-      testData.toDataFrame.collect.toSeq
+      testData.collect.toSeq
     )
 
     sql("DROP TABLE table_with_partition")