Skip to content

Commit 25ec746

Browse files
authored
fix mistakes (#9)
1 parent a471f33 commit 25ec746

File tree

4 files changed: +22 additions, −11 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 3 additions & 2 deletions
@@ -2952,8 +2952,9 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logging
   protected def getSerdeInfo(
       rowFormatCtx: Seq[RowFormatContext],
       createFileFormatCtx: Seq[CreateFileFormatContext],
-      ctx: ParserRuleContext): Option[SerdeInfo] = {
-    validateRowFormatFileFormat(rowFormatCtx, createFileFormatCtx, ctx)
+      ctx: ParserRuleContext,
+      skipCheck: Boolean = false): Option[SerdeInfo] = {
+    if (!skipCheck) validateRowFormatFileFormat(rowFormatCtx, createFileFormatCtx, ctx)
     val rowFormatSerdeInfo = rowFormatCtx.map(visitRowFormat)
     val fileFormatSerdeInfo = createFileFormatCtx.map(visitCreateFileFormat)
     (fileFormatSerdeInfo ++ rowFormatSerdeInfo).reduceLeftOption((l, r) => l.merge(r))

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala

Lines changed: 1 addition & 1 deletion
@@ -297,7 +297,7 @@ class ResolveSessionCatalog(
         assertNoNullTypeInSchema(c.asSelect.schema)
       }
       val (storageFormat, provider) = getStorageFormatAndProvider(
-        c.provider, c.options, c.location, c.serde, ctas = false)
+        c.provider, c.options, c.location, c.serde, ctas = true)
       if (!isV2Provider(provider)) {
         val tableDesc = buildCatalogTable(tbl.asTableIdentifier, new StructType,
           c.partitioning, c.bucketSpec, c.properties, provider, c.location,

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 15 additions & 4 deletions
@@ -438,13 +438,23 @@ class SparkSqlAstBuilder extends AstBuilder {
     checkDuplicateClauses(ctx.TBLPROPERTIES, "TBLPROPERTIES", ctx)
     val provider = ctx.tableProvider.asScala.headOption.map(_.multipartIdentifier.getText)
     val location = visitLocationSpecList(ctx.locationSpec())
-    // rowStorage used to determine CatalogStorageFormat.serde and
-    // CatalogStorageFormat.properties in STORED AS clause.
-    val serdeInfo = getSerdeInfo(ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx)
+    // TODO: Do not skip serde check for CREATE TABLE LIKE.
+    val serdeInfo = getSerdeInfo(
+      ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx, skipCheck = true)
     if (provider.isDefined && serdeInfo.isDefined) {
       operationNotAllowed(s"CREATE TABLE LIKE ... USING ... ${serdeInfo.get.describe}", ctx)
     }

+    // TODO: remove this restriction as it seems unnecessary.
+    serdeInfo match {
+      case Some(SerdeInfo(storedAs, formatClasses, serde, _)) =>
+        if (storedAs.isEmpty && formatClasses.isEmpty && serde.isDefined) {
+          throw new ParseException("'ROW FORMAT' must be used with 'STORED AS'", ctx)
+        }
+      case _ =>
+    }
+
+    // TODO: also look at `HiveSerDe.getDefaultStorage`.
     val storage = toStorageFormat(location, serdeInfo, ctx)
     val properties = Option(ctx.tableProps).map(visitPropertyKeyValues).getOrElse(Map.empty)
     CreateTableLikeCommand(
@@ -603,7 +613,8 @@ class SparkSqlAstBuilder extends AstBuilder {
    */
   override def visitInsertOverwriteHiveDir(
       ctx: InsertOverwriteHiveDirContext): InsertDirParams = withOrigin(ctx) {
-    val serdeInfo = getSerdeInfo(Seq(ctx.rowFormat), Seq(ctx.createFileFormat), ctx)
+    val serdeInfo = getSerdeInfo(
+      Option(ctx.rowFormat).toSeq, Option(ctx.createFileFormat).toSeq, ctx)
     val path = string(ctx.path)
     // The path field is required
     if (path.isEmpty) {

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 3 additions & 4 deletions
@@ -2780,7 +2780,7 @@ class HiveDDLSuite
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
       """.stripMargin)
     }.getMessage
-    assert(e.contains("'ROW FORMAT' must be used with 'STORED AS'"))
+    assert(e.contains("Operation not allowed: CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE"))

     // row format doesn't work with provider hive
     e = intercept[AnalysisException] {
@@ -2791,7 +2791,7 @@ class HiveDDLSuite
         |WITH SERDEPROPERTIES ('test' = 'test')
       """.stripMargin)
     }.getMessage
-    assert(e.contains("'ROW FORMAT' must be used with 'STORED AS'"))
+    assert(e.contains("Operation not allowed: CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE"))

     // row format doesn't work without 'STORED AS'
     e = intercept[AnalysisException] {
@@ -2813,8 +2813,7 @@ class HiveDDLSuite
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
       """.stripMargin)
     }.getMessage
-    assert(e.contains(
-      "'INPUTFORMAT hiveFormat' and 'USING provider' should not be specified both"))
+    assert(e.contains("Operation not allowed: CREATE TABLE LIKE ... USING ... STORED AS"))

     // row format works with STORED AS hive format (from hive table)
     spark.sql(

0 commit comments

Comments (0)