Commit 1ed7136

Fix build break again.
1 parent 921b2e3 commit 1ed7136

3 files changed, +4 -4 lines changed


sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 2 additions & 2 deletions
@@ -791,7 +791,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
    * indicating if a table is a temporary one or not).
    */
   def tables(): DataFrame = {
-    createDataFrame(catalog.getTables(None)).toDataFrame("tableName", "isTemporary")
+    createDataFrame(catalog.getTables(None)).toDF("tableName", "isTemporary")
   }
 
   /**
@@ -800,7 +800,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
    * indicating if a table is a temporary one or not).
    */
   def tables(databaseName: String): DataFrame = {
-    createDataFrame(catalog.getTables(Some(databaseName))).toDataFrame("tableName", "isTemporary")
+    createDataFrame(catalog.getTables(Some(databaseName))).toDF("tableName", "isTemporary")
   }
 
   /**

sql/core/src/test/scala/org/apache/spark/sql/ListTablesSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ class ListTablesSuite extends QueryTest with BeforeAndAfter {
   import org.apache.spark.sql.test.TestSQLContext.implicits._
 
   val df =
-    sparkContext.parallelize((1 to 10).map(i => (i,s"str$i"))).toDataFrame("key", "value")
+    sparkContext.parallelize((1 to 10).map(i => (i,s"str$i"))).toDF("key", "value")
 
   before {
     df.registerTempTable("ListTablesSuiteTable")

sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ class ListTablesSuite extends QueryTest with BeforeAndAfterAll {
   import org.apache.spark.sql.hive.test.TestHive.implicits._
 
   val df =
-    sparkContext.parallelize((1 to 10).map(i => (i,s"str$i"))).toDataFrame("key", "value")
+    sparkContext.parallelize((1 to 10).map(i => (i,s"str$i"))).toDF("key", "value")
 
   override def beforeAll(): Unit = {
     // The catalog in HiveContext is a case insensitive one.
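For context, the build break appears to come from call sites still using the old toDataFrame name after that helper was renamed to toDF; each file above makes that one-word substitution. The following is a minimal, self-contained sketch of the updated pattern, assuming a Spark 1.3-era build where SQLContext exposes an implicits object; the object name ToDFExample and the local[2] master are illustrative only and not part of the commit.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ToDFExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("toDF-example").setMaster("local[2]"))
    val sqlContext = new SQLContext(sc)
    // Brings in the implicit conversion that adds toDF to RDDs of tuples and case classes.
    import sqlContext.implicits._

    // Same pattern as the updated tests: build an RDD of tuples and name the columns via toDF.
    val df = sc.parallelize((1 to 10).map(i => (i, s"str$i"))).toDF("key", "value")
    df.registerTempTable("ListTablesSuiteTable")

    // tables() is the SQLContext method touched in this commit; it lists tables as a DataFrame.
    sqlContext.tables().show()

    sc.stop()
  }
}

Calling toDF() with no arguments would keep the default tuple column names (_1, _2); passing names, as the tests do, assigns "key" and "value" instead.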
