Skip to content

Commit 48145d3

Browse files
committed
fix order, using asc by default
1 parent 343db39 commit 48145d3

File tree

2 files changed

+9
-3
lines changed

2 files changed

+9
-3
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -209,9 +209,8 @@ class SqlParser extends AbstractSparkSQLParser {
209209
)
210210

211211
protected lazy val ordering: Parser[Seq[SortOrder]] =
212-
( rep1sep(singleOrder, ",")
213-
| rep1sep(expression, ",") ~ direction.? ^^ {
214-
case exps ~ d => exps.map(SortOrder(_, d.getOrElse(Ascending)))
212+
( rep1sep(expression ~ direction.? , ",") ^^ {
213+
case exps => exps.map(pair => SortOrder(pair._1, pair._2.getOrElse(Ascending)))
215214
}
216215
)
217216

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -987,6 +987,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
987987
)
988988
}
989989

990+
test("oder by asc by default when not specify ascending and descending") {
991+
checkAnswer(
992+
sql("SELECT a, b FROM testData2 ORDER BY a desc, b"),
993+
Seq((3, 1), (3, 2), (2, 1), (2,2), (1, 1), (1, 2))
994+
)
995+
}
996+
990997
test("Supporting relational operator '<=>' in Spark SQL") {
991998
val nullCheckData1 = TestData(1,"1") :: TestData(2,null) :: Nil
992999
val rdd1 = sparkContext.parallelize((0 to 1).map(i => nullCheckData1(i)))

0 commit comments

Comments (0)