
Commit 83b6fc3

minor fix
1 parent 9bf12f8 commit 83b6fc3

4 files changed: +19 -11 lines

sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala

Lines changed: 1 addition & 2 deletions

@@ -43,8 +43,7 @@ private[sql] case class JSONRelation(
 
   private def baseRDD = sqlContext.sparkContext.textFile(fileName)
 
-  override val schema =
-    userSpecifiedSchema.getOrElse(
+  override val schema = userSpecifiedSchema.getOrElse(
     JsonRDD.inferSchema(
       baseRDD,
       samplingRatio,
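This hunk only collapses the declaration onto one line, but the pattern it preserves is worth noting: Option.getOrElse takes its default by name, so JsonRDD.inferSchema only runs when no user-specified schema exists. A minimal standalone sketch of that fallback (Schema, inferSchema, and schemaFor are illustrative stand-ins, not Spark's API):

// Sketch of the fallback the hunk keeps: getOrElse's default is a by-name
// parameter, so the expensive inference is skipped when a schema is supplied.
object SchemaFallback {
  case class Schema(fieldNames: Seq[String])

  // Stand-in for JsonRDD.inferSchema: pretend this needs a full data scan.
  def inferSchema(lines: Seq[String]): Schema = {
    println("inferring schema (expensive)...")
    Schema(lines.flatMap(_.split(",")).distinct)
  }

  def schemaFor(userSpecified: Option[Schema], lines: Seq[String]): Schema =
    userSpecified.getOrElse(inferSchema(lines)) // by-name default: lazy

  def main(args: Array[String]): Unit = {
    schemaFor(Some(Schema(Seq("a", "b"))), Nil) // nothing printed
    schemaFor(None, Seq("x,y", "y,z"))          // inference runs here
  }
}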

sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala

Lines changed: 16 additions & 5 deletions

@@ -100,9 +100,15 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
   protected lazy val pair: Parser[(String, String)] = ident ~ stringLit ^^ { case k ~ v => (k,v) }
 
   protected lazy val column: Parser[StructField] =
-    ident ~ ident ^^ { case name ~ typ =>
+    ( ident ~ ident ^^ { case name ~ typ =>
       StructField(name, metastoreTypes.toDataType(typ))
     }
+      |
+      ident ~ ("decimal" ~ "(" ~> numericLit) ~ ("," ~> numericLit <~ ")") ^^ {
+        case name ~ precision ~ scale =>
+          StructField(name, DecimalType(precision.toInt, scale.toInt))
+      }
+    )
 }
 
 /**
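The new alternative lets the DDL column list accept decimal(p, s) alongside plain `name type` pairs. A self-contained sketch of the same parser-combinator shape (ColumnParser and Column are hypothetical; this is not Spark's DDLParser, which produces StructFields):

import scala.util.parsing.combinator.syntactical.StandardTokenParsers

// A column is either `name type` or `name decimal(precision, scale)`.
// The first alternative fails on the reserved word "decimal" and the
// parser backtracks into the second, mirroring the diff.
object ColumnParser extends StandardTokenParsers {
  lexical.delimiters ++= Seq("(", ")", ",")
  lexical.reserved += "decimal"

  case class Column(name: String, dataType: String)

  lazy val column: Parser[Column] =
    ( ident ~ ident ^^ { case name ~ typ => Column(name, typ) }
    | ident ~ ("decimal" ~ "(" ~> numericLit) ~ ("," ~> numericLit <~ ")") ^^ {
        case name ~ precision ~ scale =>
          // Mirrors StructField(name, DecimalType(precision.toInt, scale.toInt))
          Column(name, s"decimal(${precision.toInt},${scale.toInt})")
      }
    )

  def parse(input: String): Column =
    phrase(column)(new lexical.Scanner(input)) match {
      case Success(result, _) => result
      case failure            => sys.error(failure.toString)
    }

  def main(args: Array[String]): Unit = {
    println(parse("age int"))              // Column(age,int)
    println(parse("price decimal(10, 2)")) // Column(price,decimal(10,2))
  }
}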
@@ -121,8 +127,8 @@ private[sql] class MetastoreTypes extends RegexParsers {
     "bigint" ^^^ LongType |
     "binary" ^^^ BinaryType |
     "boolean" ^^^ BooleanType |
-    fixedDecimalType | // Hive 0.13+ decimal with precision/scale
-    "decimal" ^^^ DecimalType.Unlimited | // Hive 0.12 decimal with no precision/scale
+    fixedDecimalType | // decimal with precision/scale
+    "decimal" ^^^ DecimalType.Unlimited | // decimal with no precision/scale
     "date" ^^^ DateType |
     "timestamp" ^^^ TimestampType |
     "varchar\\((\\d+)\\)".r ^^^ StringType
@@ -204,8 +210,13 @@ private[sql] case class CreateTableUsing(
     }
     val dataSource =
       clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-    val relation = dataSource.createRelation(
-      sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
+    val relation = if(tableCols.isEmpty) {
+      dataSource.createRelation(
+        sqlContext, new CaseInsensitiveMap(options))
+    } else {
+      dataSource.createRelation(
+        sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
+    }
 
     sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
     Seq.empty
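The branch chooses between two createRelation overloads at runtime: an empty column list means the source infers its own schema, while a declared column list is passed through. A toy sketch of the same dispatch (Provider, Relation, and DummyProvider are hypothetical stand-ins, not Spark's sources API):

// Empty column list -> schema-inferring overload; otherwise the
// user-declared schema wins.
object RelationDispatch {
  case class Relation(schema: Seq[String], inferred: Boolean)

  trait Provider {
    def createRelation(options: Map[String, String]): Relation
    def createRelation(options: Map[String, String], schema: Seq[String]): Relation
  }

  object DummyProvider extends Provider {
    def createRelation(options: Map[String, String]): Relation =
      Relation(Seq("col_from_inference"), inferred = true)
    def createRelation(options: Map[String, String], schema: Seq[String]): Relation =
      Relation(schema, inferred = false)
  }

  def resolve(tableCols: Seq[String], options: Map[String, String]): Relation =
    if (tableCols.isEmpty) DummyProvider.createRelation(options)
    else DummyProvider.createRelation(options, tableCols)

  def main(args: Array[String]): Unit = {
    println(resolve(Nil, Map("path" -> "/tmp/people.json"))) // inferred = true
    println(resolve(Seq("price decimal(10,2)"), Map.empty))  // inferred = false
  }
}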

sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala

Lines changed: 1 addition & 3 deletions

@@ -41,9 +41,7 @@ trait RelationProvider {
   * Note: the parameters' keywords are case insensitive and this insensitivity is enforced
   * by the Map that is passed to the function.
   */
-  def createRelation(
-      sqlContext: SQLContext,
-      parameters: Map[String, String]): BaseRelation
+  def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation
 }
 
 /**

sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala

Lines changed: 1 addition & 1 deletion

@@ -70,7 +70,7 @@ class QueryTest extends PlanTest {
        """.stripMargin)
    }
 
-    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) { // issues here, sparkAnswer may be GenericRow[]
+    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) {
      fail(s"""
        |Results do not match for query:
        |${rdd.logicalPlan}
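The deleted comment worried that sparkAnswer may arrive as GenericRow[]; since both sides go through prepareAnswer before comparison, the concrete row representation never matters. A standalone illustration of that normalize-then-compare idea (prepareAnswer here is a guessed shape, not Spark's QueryTest):

// Canonicalize both answers, then compare: stringify every cell and sort
// rows so neither row class nor result order causes spurious mismatches.
object AnswerCompare {
  def prepareAnswer(rows: Seq[Seq[Any]]): Seq[Seq[String]] =
    rows.map(_.map(_.toString)).sortBy(_.mkString("\u0000"))

  def main(args: Array[String]): Unit = {
    val convertedAnswer = Seq(Seq(1, "a"), Seq(2, "b"))
    val sparkAnswer     = Seq(Seq(2, "b"), Seq(1, "a")) // same rows, other order
    assert(prepareAnswer(convertedAnswer) == prepareAnswer(sparkAnswer))
    println("Results match")
  }
}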

0 commit comments