@@ -118,8 +118,14 @@ class DataFrame protected[sql](
   }
 
   /** Left here for compatibility reasons. */
+  @deprecated("use toDF", "1.3.0")
   def toSchemaRDD: DataFrame = this
 
+  /**
+   * Return the object itself. Used to force an implicit conversion from RDD to DataFrame in Scala.
+   */
+  def toDF: DataFrame = this
+
   /** Return the schema of this [[DataFrame]]. */
   override def schema: StructType = queryExecution.analyzed.schema
 
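A minimal usage sketch of the new `toDF` next to the deprecated `toSchemaRDD` alias. The `sc` SparkContext, the `Person` case class, and the input path are assumptions, and the implicit RDD-to-DataFrame conversion is assumed to come into scope via the SQLContext's implicits:

    case class Person(name: String, age: Int)

    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext._  // assumed to bring the implicit RDD-to-DataFrame conversion into scope

    // toDF forces the implicit conversion eagerly, so `people` is typed as DataFrame:
    val people = sc.textFile("people.txt")
      .map(_.split(","))
      .map(p => Person(p(0), p(1).trim.toInt))
      .toDF

    // The deprecated alias still compiles, but now emits a deprecation warning:
    val legacy = people.toSchemaRDD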
@@ -501,7 +507,7 @@ class DataFrame protected[sql](
 
   /**
    * Registers this RDD as a temporary table using the given name. The lifetime of this temporary
-   * table is tied to the [[SQLContext]] that was used to create this SchemaRDD.
+   * table is tied to the [[SQLContext]] that was used to create this DataFrame.
    *
    * @group schema
    */
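A short sketch of the documented lifetime, reusing the hypothetical `people` DataFrame from the sketch above: the temporary table is only visible through the SQLContext that created the DataFrame.

    // Register the DataFrame under a name in this SQLContext's catalog.
    people.registerTempTable("people")

    // Visible to SQL queries issued through the same SQLContext...
    val adults = sqlContext.sql("SELECT name FROM people WHERE age >= 18")

    // ...but not through a different SQLContext, and it disappears when this one goes away.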
@@ -510,20 +516,20 @@ class DataFrame protected[sql](
   }
 
   /**
-   * Saves the contents of this [[DataFrame]] as a parquet file, preserving the schema. Files that
-   * are written out using this method can be read back in as a [[DataFrame]] using the `parquetFile`
-   * function.
+   * Saves the contents of this [[DataFrame]] as a parquet file, preserving the schema.
+   * Files that are written out using this method can be read back in as a [[DataFrame]]
+   * using the `parquetFile` function in [[SQLContext]].
    */
   override def saveAsParquetFile(path: String): Unit = {
     sqlContext.executePlan(WriteToFile(path, logicalPlan)).toRdd
   }
 
   /**
    * :: Experimental ::
-   * Creates a table from the the contents of this SchemaRDD. This will fail if the table already
+   * Creates a table from the contents of this DataFrame. This will fail if the table already
    * exists.
    *
-   * Note that this currently only works with SchemaRDDs that are created from a HiveContext as
+   * Note that this currently only works with DataFrames that are created from a HiveContext as
    * there is no notion of a persisted catalog in a standard SQL context. Instead you can write
    * an RDD out to a parquet file, and then register that file as a table. This "table" can then
    * be the target of an `insertInto`.
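A hypothetical round trip for the two documented paths: the parquet write/read works with any SQLContext, while `saveAsTable` needs a HiveContext-backed catalog. The paths and the `hiveContext` value are assumptions:

    // Write out the DataFrame; the schema travels with the parquet files.
    people.saveAsParquetFile("people.parquet")

    // Read it back through the SQLContext, as the scaladoc above describes.
    val restored = sqlContext.parquetFile("people.parquet")
    restored.registerTempTable("restored_people")

    // saveAsTable, by contrast, only works for DataFrames created from a HiveContext,
    // since a plain SQLContext has no persisted catalog:
    // val hiveContext = new org.apache.spark.sql.hive.HiveContext(sc)
    // ...DataFrames created from hiveContext can call saveAsTable("people_table")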