[SPARK-21830][SQL] Bump ANTLR version and fix a few issues. #19042

Closed · wants to merge 2 commits
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.6
@@ -5,7 +5,7 @@ activation-1.1.1.jar
aircompressor-0.3.jar
antlr-2.7.7.jar
antlr-runtime-3.4.jar
-antlr4-runtime-4.5.3.jar
+antlr4-runtime-4.7.jar
aopalliance-1.0.jar
aopalliance-repackaged-2.4.0-b34.jar
apache-log4j-extras-1.2.17.jar
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2.7
@@ -5,7 +5,7 @@ activation-1.1.1.jar
aircompressor-0.3.jar
antlr-2.7.7.jar
antlr-runtime-3.4.jar
-antlr4-runtime-4.5.3.jar
+antlr4-runtime-4.7.jar
aopalliance-1.0.jar
aopalliance-repackaged-2.4.0-b34.jar
apache-log4j-extras-1.2.17.jar
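These manifests pin the resolved runtime classpath for each Hadoop profile and are checked against the Maven build by dev/test-dependencies.sh, so they have to move in lockstep with the antlr4.version property updated in pom.xml below.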
2 changes: 1 addition & 1 deletion pom.xml
@@ -178,7 +178,7 @@
<jodd.version>3.5.2</jodd.version>
<jsr305.version>1.3.9</jsr305.version>
<libthrift.version>0.9.3</libthrift.version>
-<antlr4.version>4.5.3</antlr4.version>
+<antlr4.version>4.7</antlr4.version>
<jpam.version>1.1</jpam.version>
<selenium.version>2.52.0</selenium.version>
<paranamer.version>2.6</paranamer.version>
1 change: 1 addition & 0 deletions project/SparkBuild.scala
@@ -474,6 +474,7 @@ object OldDeps {

object Catalyst {
lazy val settings = antlr4Settings ++ Seq(
+antlr4Version in Antlr4 := "4.7",
antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
antlr4GenListener in Antlr4 := true,
antlr4GenVisitor in Antlr4 := true
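Pinning antlr4Version for the sbt build keeps it in sync with the Maven property above; ANTLR-generated parsers call RuntimeMetaData.checkVersion and warn at runtime when the generating tool and the runtime jar disagree. A minimal sketch for confirming which runtime ends up on the classpath (illustrative only, not part of this patch):

    import org.antlr.v4.runtime.RuntimeMetaData

    object AntlrRuntimeCheck extends App {
      // VERSION is the version string compiled into the ANTLR runtime jar.
      println(RuntimeMetaData.VERSION) // expected to print 4.7 after this bump
    }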
6 changes: 5 additions & 1 deletion sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -64,6 +64,10 @@ singleDataType
: dataType EOF
;

+singleTableSchema
+: colTypeList EOF
+;
+
statement
: query #statementDefault
| USE db=identifier #use
@@ -974,7 +978,7 @@ CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP';

STRING
: '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
| '\"' ( ~('\"'|'\\') | ('\\' .) )* '\"'
| '"' ( ~('"'|'\\') | ('\\' .) )* '"'
;

BIGINT_LITERAL
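Two notes on the grammar change: the new singleTableSchema entry point mirrors singleDataType, and its trailing EOF forces the parser to consume the whole input, so text after a valid column list is rejected instead of silently dropped (see the parseTableSchema sketch below). The STRING cleanup removes the backslash before the double quote; inside a single-quoted ANTLR literal the quote needs no escape, and newer ANTLR versions flag the redundant escape sequence.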
4 changes: 4 additions & 0 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -89,6 +89,10 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
visitSparkDataType(ctx.dataType)
}

+override def visitSingleTableSchema(ctx: SingleTableSchemaContext): StructType = {
+withOrigin(ctx)(StructType(visitColTypeList(ctx.colTypeList)))
+}

/* ********************************************************************************************
* Plan parsing
* ******************************************************************************************** */
2 changes: 1 addition & 1 deletion sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
@@ -61,7 +61,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
* definitions which will preserve the correct Hive metadata.
*/
override def parseTableSchema(sqlText: String): StructType = parse(sqlText) { parser =>
-StructType(astBuilder.visitColTypeList(parser.colTypeList()))
+astBuilder.visitSingleTableSchema(parser.singleTableSchema())
}

/** Creates LogicalPlan for a given SQL string. */
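Routing parseTableSchema through the new EOF-anchored rule changes what happens to trailing garbage. A rough sketch of the difference, using the CatalystSqlParser entry point (illustrative, not part of this patch):

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // Well-formed input still parses to a StructType.
    val schema = CatalystSqlParser.parseTableSchema("a INT, b STRING")

    // Text after the last valid column type used to be silently ignorable;
    // with the EOF anchor this now throws a ParseException.
    CatalystSqlParser.parseTableSchema("a INT b long")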
14 changes: 8 additions & 6 deletions sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableSchemaParserSuite.scala
@@ -79,10 +79,12 @@ class TableSchemaParserSuite extends SparkFunSuite {
}

// Negative cases
assertError("")
assertError("a")
assertError("a INT b long")
assertError("a INT,, b long")
assertError("a INT, b long,,")
assertError("a INT, b long, c int,")
test("Negative cases") {
assertError("")
assertError("a")
assertError("a INT b long")
assertError("a INT,, b long")
assertError("a INT, b long,,")
assertError("a INT, b long, c int,")
}
}
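Wrapping the negative cases in test(...) matters: the bare assertions used to run in the suite's constructor, so a regression would abort suite initialization instead of surfacing as an ordinary test failure. The assertError helper is defined earlier in the suite, outside this diff; a plausible shape, for reference only:

    private def assertError(sql: String): Unit = {
      // Each negative input must be rejected by the schema parser.
      intercept[ParseException] {
        CatalystSqlParser.parseTableSchema(sql)
      }
    }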
4 changes: 2 additions & 2 deletions sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -164,7 +164,7 @@ struct<>
-- !query 13 output
org.apache.spark.sql.catalyst.parser.ParseException

-mismatched input '<EOF>' expecting 'LIKE'(line 1, pos 19)
+mismatched input '<EOF>' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 19)

== SQL ==
SHOW TABLE EXTENDED
@@ -187,7 +187,7 @@ struct<>
-- !query 15 output
org.apache.spark.sql.catalyst.parser.ParseException

-mismatched input 'PARTITION' expecting 'LIKE'(line 1, pos 20)
+mismatched input 'PARTITION' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 20)

== SQL ==
SHOW TABLE EXTENDED PARTITION(c='Us', d=1)
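The golden-file updates reflect ANTLR 4.7's richer error reporting: where 4.5.3 named a single expected token, 4.7 lists the full set of viable alternatives at the failure point.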
2 changes: 1 addition & 1 deletion sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -468,7 +468,7 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
.option("createTableColumnTypes", "`name char(20)") // incorrectly quoted column
.jdbc(url1, "TEST.USERDBTYPETEST", properties)
}.getMessage()
assert(msg.contains("no viable alternative at input"))
assert(msg.contains("extraneous input"))
}

test("SPARK-10849: jdbc CreateTableColumnTypes duplicate columns") {
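Likewise, ANTLR 4.7's revised error recovery classifies the stray backquote as extraneous input rather than no viable alternative, so the asserted message fragment changes with it.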