@@ -45,6 +45,9 @@ setMethod("initialize", "DataFrame", function(.Object, sdf, isCached) {
45
45
46
46
#' @rdname DataFrame
#' @export
#'
#' @param sdf A Java object reference to the backing Scala DataFrame
#' @param isCached TRUE if the DataFrame is cached
dataFrame <- function(sdf, isCached = FALSE) {
  # Wrap the JVM-side DataFrame reference in the S4 "DataFrame" class
  # (class defined elsewhere in this file).
  new("DataFrame", sdf, isCached)
}
@@ -244,7 +247,7 @@ setMethod("columns",
244
247
})
245
248
246
249
# ' @rdname columns
247
- # ' @export
250
+ # ' @aliases names,DataFrame,function-method
248
251
setMethod ("names ",
249
252
signature(x = " DataFrame" ),
250
253
function (x ) {
@@ -399,23 +402,23 @@ setMethod("repartition",
399
402
dataFrame(sdf )
400
403
})
401
404
402
- # ' toJSON
403
- # '
404
- # ' Convert the rows of a DataFrame into JSON objects and return an RDD where
405
- # ' each element contains a JSON string.
406
- # '
407
- # ' @param x A SparkSQL DataFrame
408
- # ' @return A StringRRDD of JSON objects
409
- # ' @rdname tojson
410
- # ' @export
411
- # ' @examples
412
- # ' \dontrun{
413
- # ' sc <- sparkR.init()
414
- # ' sqlCtx <- sparkRSQL.init(sc)
415
- # ' path <- "path/to/file.json"
416
- # ' df <- jsonFile(sqlCtx, path)
417
- # ' newRDD <- toJSON(df)
418
- # ' }
405
+ # toJSON
406
+ #
407
+ # Convert the rows of a DataFrame into JSON objects and return an RDD where
408
+ # each element contains a JSON string.
409
+ #
410
+ # @param x A SparkSQL DataFrame
411
+ # @return A StringRRDD of JSON objects
412
+ # @rdname tojson
413
+ # @export
414
+ # @examples
415
+ # \dontrun{
416
+ # sc <- sparkR.init()
417
+ # sqlCtx <- sparkRSQL.init(sc)
418
+ # path <- "path/to/file.json"
419
+ # df <- jsonFile(sqlCtx, path)
420
+ # newRDD <- toJSON(df)
421
+ # }
419
422
setMethod ("toJSON ",
420
423
signature(x = " DataFrame" ),
421
424
function (x ) {
@@ -578,8 +581,8 @@ setMethod("limit",
578
581
dataFrame(res )
579
582
})
580
583
581
- # Take the first NUM rows of a DataFrame and return a the results as a data.frame
582
-
584
+ #' Take the first NUM rows of a DataFrame and return the results as a data.frame
585
+ # '
583
586
# ' @rdname take
584
587
# ' @export
585
588
# ' @examples
@@ -644,22 +647,22 @@ setMethod("first",
644
647
take(x , 1 )
645
648
})
646
649
647
- # ' toRDD()
648
- # '
649
- # ' Converts a Spark DataFrame to an RDD while preserving column names.
650
- # '
651
- # ' @param x A Spark DataFrame
652
- # '
653
- # ' @rdname DataFrame
654
- # ' @export
655
- # ' @examples
656
- # ' \dontrun{
657
- # ' sc <- sparkR.init()
658
- # ' sqlCtx <- sparkRSQL.init(sc)
659
- # ' path <- "path/to/file.json"
660
- # ' df <- jsonFile(sqlCtx, path)
661
- # ' rdd <- toRDD(df)
662
- # ' }
650
+ # toRDD()
651
+ #
652
+ # Converts a Spark DataFrame to an RDD while preserving column names.
653
+ #
654
+ # @param x A Spark DataFrame
655
+ #
656
+ # @rdname DataFrame
657
+ # @export
658
+ # @examples
659
+ # \dontrun{
660
+ # sc <- sparkR.init()
661
+ # sqlCtx <- sparkRSQL.init(sc)
662
+ # path <- "path/to/file.json"
663
+ # df <- jsonFile(sqlCtx, path)
664
+ # rdd <- toRDD(df)
665
+ # }
663
666
setMethod ("toRDD ",
664
667
signature(x = " DataFrame" ),
665
668
function (x ) {
@@ -706,6 +709,7 @@ setMethod("groupBy",
706
709
# '
707
710
# ' Compute aggregates by specifying a list of columns
708
711
# '
712
+ # ' @param x a DataFrame
709
713
# ' @rdname DataFrame
710
714
# ' @export
711
715
setMethod ("agg ",
@@ -721,53 +725,53 @@ setMethod("agg",
721
725
# the requested map function. #
722
726
# ##################################################################################
723
727
724
# @rdname lapply
# Apply FUN to each element of the DataFrame, by first converting it
# to an RDD (see toRDD below) and delegating to the RDD lapply method.
setMethod("lapply",
          signature(X = "DataFrame", FUN = "function"),
          function(X, FUN) {
            rdd <- toRDD(X)
            lapply(rdd, FUN)
          })
731
735
732
# @rdname lapply
# map is an alias for lapply on DataFrame.
setMethod("map",
          signature(X = "DataFrame", FUN = "function"),
          function(X, FUN) {
            lapply(X, FUN)
          })
738
742
739
# @rdname flatMap
# Convert the DataFrame to an RDD and delegate to the RDD flatMap method.
setMethod("flatMap",
          signature(X = "DataFrame", FUN = "function"),
          function(X, FUN) {
            rdd <- toRDD(X)
            flatMap(rdd, FUN)
          })
746
750
747
# @rdname lapplyPartition
# Apply FUN to each partition of the DataFrame, by first converting it
# to an RDD and delegating to the RDD lapplyPartition method.
setMethod("lapplyPartition",
          signature(X = "DataFrame", FUN = "function"),
          function(X, FUN) {
            rdd <- toRDD(X)
            lapplyPartition(rdd, FUN)
          })
754
758
755
# @rdname lapplyPartition
# mapPartitions is an alias for lapplyPartition on DataFrame.
setMethod("mapPartitions",
          signature(X = "DataFrame", FUN = "function"),
          function(X, FUN) {
            lapplyPartition(X, FUN)
          })
761
765
762
# @rdname foreach
# Apply func to each element of the DataFrame for its side effects,
# by first converting it to an RDD and delegating to the RDD foreach method.
setMethod("foreach",
          signature(x = "DataFrame", func = "function"),
          function(x, func) {
            rdd <- toRDD(x)
            foreach(rdd, func)
          })
769
773
770
- # ' @rdname foreach
774
+ # @rdname foreach
771
775
setMethod ("foreachPartition ",
772
776
signature(x = " DataFrame" , func = " function" ),
773
777
function (x , func ) {
@@ -1009,7 +1013,7 @@ setMethod("sortDF",
1009
1013
})
1010
1014
1011
1015
# ' @rdname sortDF
1012
- # ' @export
1016
+ # ' @aliases orderBy,DataFrame,function-method
1013
1017
setMethod ("orderBy ",
1014
1018
signature(x = " DataFrame" , col = " characterOrColumn" ),
1015
1019
function (x , col ) {
@@ -1046,7 +1050,7 @@ setMethod("filter",
1046
1050
})
1047
1051
1048
1052
# ' @rdname filter
1049
- # ' @export
1053
+ # ' @aliases where,DataFrame,function-method
1050
1054
setMethod ("where ",
1051
1055
signature(x = " DataFrame" , condition = " characterOrColumn" ),
1052
1056
function (x , condition ) {
0 commit comments