@@ -54,9 +54,9 @@ infer_type <- function(x) {
       # StructType
       types <- lapply(x, infer_type)
       fields <- lapply(1:length(x), function(i) {
-        field(names[[i]], types[[i]], TRUE)
+        structField(names[[i]], types[[i]], TRUE)
       })
-      do.call(buildSchema, fields)
+      do.call(structType, fields)
     }
   } else if (length(x) > 1) {
     list(type = "array", elementType = type, containsNull = TRUE)
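In this hunk, infer_type switches from the old field/buildSchema helpers to structField/structType when deriving a schema from a named list. A minimal sketch of what the refactored branch produces, assuming structField(name, type, nullable) and structType(field, ...) behave as the calls above imply (the example list and inferred type strings are illustrative, not from the patch):

# Hypothetical walk-through of the refactored branch for a named list
x <- list(a = 1L, b = "text")
types <- lapply(x, infer_type)                   # e.g. list("integer", "string")
fields <- lapply(1:length(x), function(i) {
  structField(names(x)[[i]], types[[i]], TRUE)   # one nullable field per element
})
schema <- do.call(structType, fields)            # same as structType(fields[[1]], fields[[2]])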
@@ -110,7 +110,7 @@ createDataFrame <- function(sqlCtx, data, schema = NULL, samplingRatio = 1.0) {
     stop(paste("unexpected type:", class(data)))
   }
 
-  if (is.null(schema) || (!inherits(schema, "struct") && is.null(names(schema)))) {
+  if (is.null(schema) || (!inherits(schema, "structType") && is.null(names(schema)))) {
     row <- first(rdd)
     names <- if (is.null(schema)) {
       names(row)
@@ -135,18 +135,18 @@ createDataFrame <- function(sqlCtx, data, schema = NULL, samplingRatio = 1.0) {
 
     types <- lapply(row, infer_type)
     fields <- lapply(1:length(row), function(i) {
-      field(names[[i]], types[[i]], TRUE)
+      structField(names[[i]], types[[i]], TRUE)
     })
-    schema <- do.call(buildSchema, fields)
+    schema <- do.call(structType, fields)
   }
 
-  stopifnot(class(schema) == "struct")
-  schemaString <- tojson(schema)
+  stopifnot(class(schema) == "structType")
+  # schemaString <- tojson(schema)
 
   jrdd <- getJRDD(lapply(rdd, function(x) x), "row")
   srdd <- callJMethod(jrdd, "rdd")
   sdf <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "createDF",
-                     srdd, schemaString, sqlCtx)
+                     srdd, schema$jobj, sqlCtx)
   dataFrame(sdf)
 }
 
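Net effect of the last hunk: createDataFrame now requires the supplied or inferred schema to be a structType object and hands its underlying Java reference, schema$jobj, directly to SQLUtils.createDF instead of serializing the schema to a JSON string. A rough usage sketch under that assumption; the column names and sample rows below are made up for illustration:

# Hypothetical call into the updated createDataFrame
schema <- structType(structField("name", "string", TRUE),
                     structField("age", "integer", TRUE))
df <- createDataFrame(sqlCtx, list(list("Alice", 30L), list("Bob", 25L)), schema)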