@@ -64,14 +64,14 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi

   // Data types.
   protected val STRING = Keyword("STRING")
-  protected val FLOAT = Keyword("FLOAT")
-  protected val INT = Keyword("INT")
+  protected val BINARY = Keyword("BINARY")
+  protected val BOOLEAN = Keyword("BOOLEAN")
   protected val TINYINT = Keyword("TINYINT")
   protected val SMALLINT = Keyword("SMALLINT")
-  protected val DOUBLE = Keyword("DOUBLE")
+  protected val INT = Keyword("INT")
   protected val BIGINT = Keyword("BIGINT")
-  protected val BINARY = Keyword("BINARY")
-  protected val BOOLEAN = Keyword("BOOLEAN")
+  protected val FLOAT = Keyword("FLOAT")
+  protected val DOUBLE = Keyword("DOUBLE")
   protected val DECIMAL = Keyword("DECIMAL")
   protected val DATE = Keyword("DATE")
   protected val TIMESTAMP = Keyword("TIMESTAMP")
@@ -105,8 +105,8 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
     CREATE ~ TEMPORARY ~ TABLE ~> ident
       ~ (tableCols).? ~ (USING ~> className) ~ (OPTIONS ~> options) ^^ {
       case tableName ~ columns ~ provider ~ opts =>
-        val tblColumns = if (columns.isEmpty) Seq.empty else columns.get
-        CreateTableUsing(tableName, tblColumns, provider, opts)
+        val userSpecifiedSchema = columns.flatMap(fields => Some(StructType(fields)))
+        CreateTableUsing(tableName, userSpecifiedSchema, provider, opts)
     }
   )

@@ -184,7 +184,7 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi

private[sql] case class CreateTableUsing(
    tableName: String,
-    tableCols: Seq[StructField],
+    userSpecifiedSchema: Option[StructType],
    provider: String,
    options: Map[String, String]) extends RunnableCommand {

@@ -203,16 +203,9 @@ private[sql] case class CreateTableUsing(
          .asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
          .createRelation(sqlContext, new CaseInsensitiveMap(options))
      case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider =>
-        if (tableCols.isEmpty) {
-          dataSource
-            .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-            .createRelation(sqlContext, new CaseInsensitiveMap(options))
-        } else {
-          dataSource
-            .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-            .createRelation(
-              sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
-        }
+        dataSource
+          .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
+          .createRelation(sqlContext, new CaseInsensitiveMap(options), userSpecifiedSchema)
      }

    sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
0 commit comments