Skip to content

Commit 281fc61

Browse files
committed
Fix and comment address
1 parent d76d6a5 commit 281fc61

File tree

15 files changed

+38
-40
lines changed

15 files changed

+38
-40
lines changed

docs/sql-keywords.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,15 @@ license: |
1919
limitations under the License.
2020
---
2121

22-
When `spark.sql.dialect.ansi.enabled` is true, Spark SQL has two kinds of keywords:
22+
When `spark.sql.dialect.spark.ansi.enabled` is true, Spark SQL has two kinds of keywords:
2323
* Reserved keywords: Keywords that are reserved and can't be used as identifiers for table, view, column, function, alias, etc.
2424
* Non-reserved keywords: Keywords that have a special meaning only in particular contexts and can be used as identifiers in other contexts. For example, `SELECT 1 WEEK` is an interval literal, but WEEK can be used as an identifier in other places.
2525

26-
When `spark.sql.dialect.ansi.enabled` is false, Spark SQL has two kinds of keywords:
27-
* Non-reserved keywords: Same definition as the one when `spark.sql.dialect.ansi.enabled=true`.
26+
When `spark.sql.dialect.spark.ansi.enabled` is false, Spark SQL has two kinds of keywords:
27+
* Non-reserved keywords: Same definition as the one when `spark.sql.dialect.spark.ansi.enabled=true`.
2828
* Strict-non-reserved keywords: A strict version of non-reserved keywords, which cannot be used as a table alias.
2929

30-
By default `spark.sql.dialect.ansi.enabled` is false.
30+
By default `spark.sql.dialect.spark.ansi.enabled` is false.
3131

3232
Below is a list of all the keywords in Spark SQL.
3333

sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -954,7 +954,7 @@ number
954954
| MINUS? BIGDECIMAL_LITERAL #bigDecimalLiteral
955955
;
956956

957-
// When `spark.sql.dialect.ansi.enabled=true`, there are 2 kinds of keywords in Spark SQL.
957+
// When `spark.sql.dialect.spark.ansi.enabled=true`, there are 2 kinds of keywords in Spark SQL.
958958
// - Reserved keywords:
959959
// Keywords that are reserved and can't be used as identifiers for table, view, column,
960960
// function, alias, etc.
@@ -1154,9 +1154,9 @@ ansiNonReserved
11541154
| YEARS
11551155
;
11561156

1157-
// When `spark.sql.dialect.ansi.enabled=false`, there are 2 kinds of keywords in Spark SQL.
1157+
// When `spark.sql.dialect.spark.ansi.enabled=false`, there are 2 kinds of keywords in Spark SQL.
11581158
// - Non-reserved keywords:
1159-
// Same definition as the one when `spark.sql.dialect.ansi.enabled=true`.
1159+
// Same definition as the one when `spark.sql.dialect.spark.ansi.enabled=true`.
11601160
// - Strict-non-reserved keywords:
11611161
// A strict version of non-reserved keywords, which cannot be used as a table alias.
11621162
// You can find the full keywords list by searching "Start of the keywords list" in this file.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -592,7 +592,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
592592
* Change the precision / scale in a given decimal to those set in `decimalType` (if any),
593593
* modifying `value` in-place and returning it if successful. If an overflow occurs, it
594594
* either returns null or throws an exception according to the value set for
595-
* `spark.sql.dialect.ansi.enabled`.
595+
* `spark.sql.dialect.spark.ansi.enabled`.
596596
*
597597
* NOTE: this modifies `value` in-place, so don't call it on external data.
598598
*/
@@ -611,7 +611,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
611611

612612
/**
613613
* Create new `Decimal` with precision and scale given in `decimalType` (if any).
614-
* If overflow occurs, if `spark.sql.dialect.ansi.enabled` is false, null is returned;
614+
* If overflow occurs, if `spark.sql.dialect.spark.ansi.enabled` is false, null is returned;
615615
* otherwise, an `ArithmeticException` is thrown.
616616
*/
617617
private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,7 @@ abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
150150
sys.error("BinaryArithmetics must override either calendarIntervalMethod or genCode")
151151

152152
// Name of the function for the exact version of this expression in [[Math]].
153-
// If the option "spark.sql.dialect.ansi.enabled" is enabled and there is corresponding
153+
// If the option "spark.sql.dialect.spark.ansi.enabled" is enabled and there is corresponding
154154
// function in [[Math]], the exact function will be called instead of evaluation with [[symbol]].
155155
def exactMathMethod: Option[String] = None
156156

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1657,17 +1657,16 @@ object SQLConf {
16571657

16581658
val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled")
16591659
.doc("This configuration will be deprecated in future releases and replaced by " +
1660-
"spark.sql.dialect.ansi.enabled, we keep it now for forward compatibility.")
1660+
"spark.sql.dialect.spark.ansi.enabled, we keep it now for forward compatibility.")
16611661
.booleanConf
16621662
.createWithDefault(false)
16631663

1664-
val DIALECT_SPARK_ANSI_ENABLED = buildConf("spark.sql.dialect.ansi.enabled")
1664+
val DIALECT_SPARK_ANSI_ENABLED = buildConf("spark.sql.dialect.spark.ansi.enabled")
16651665
.doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " +
16661666
"throw a runtime exception if an overflow occurs in any operation on integral/decimal " +
16671667
"field. 2. Spark will forbid using the reserved keywords of ANSI SQL as identifiers in " +
16681668
"the SQL parser.")
1669-
.booleanConf
1670-
.createWithDefault(false)
1669+
.fallbackConf(ANSI_ENABLED)
16711670

16721671
val ALLOW_CREATING_MANAGED_TABLE_USING_NONEMPTY_LOCATION =
16731672
buildConf("spark.sql.legacy.allowCreatingManagedTableUsingNonemptyLocation")
@@ -2510,8 +2509,7 @@ class SQLConf extends Serializable with Logging {
25102509

25112510
def usePostgreSQLDialect: Boolean = getConf(DIALECT) == Dialect.POSTGRESQL.toString
25122511

2513-
def dialectSparkAnsiEnabled: Boolean =
2514-
getConf(DIALECT_SPARK_ANSI_ENABLED) || getConf(ANSI_ENABLED)
2512+
def dialectSparkAnsiEnabled: Boolean = getConf(DIALECT_SPARK_ANSI_ENABLED)
25152513

25162514
def ansiEnabled: Boolean = usePostgreSQLDialect || dialectSparkAnsiEnabled
25172515

sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
-- Turns on ANSI mode
2-
SET spark.sql.dialect.ansi.enabled=true;
2+
SET spark.sql.dialect.spark.ansi.enabled=true;
33

44
select
55
'1' second,
@@ -185,4 +185,4 @@ select date '2012-01-01' + interval (a + 1) day from t;
185185
select date '2012-01-01' + (a + 1) day from t;
186186

187187
-- Turns off ANSI mode
188-
SET spark.sql.dialect.ansi.enabled=false;
188+
SET spark.sql.dialect.spark.ansi.enabled=false;

sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ select 123456789123456789.1234567890 * 1.123456789123456789;
8484
select 12345678912345.123456789123 / 0.000000012345678;
8585

8686
-- throw an exception instead of returning NULL, according to SQL ANSI 2011
87-
set spark.sql.dialect.ansi.enabled=true;
87+
set spark.sql.dialect.spark.ansi.enabled=true;
8888

8989
-- test operations between decimals and constants
9090
select id, a*10, b/10 from decimals_test order by id;

sql/core/src/test/resources/sql-tests/inputs/higher-order-functions.sql

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ select transform_values(ys, (k, v) -> k + v) as v from nested;
8888
select transform(ys, all -> all * all) as v from values (array(32, 97)) as t(ys);
8989
select transform(ys, (all, i) -> all + i) as v from values (array(32, 97)) as t(ys);
9090

91-
set spark.sql.dialect.ansi.enabled=true;
91+
set spark.sql.dialect.spark.ansi.enabled=true;
9292
select transform(ys, all -> all * all) as v from values (array(32, 97)) as t(ys);
9393
select transform(ys, (all, i) -> all + i) as v from values (array(32, 97)) as t(ys);
94-
set spark.sql.dialect.ansi.enabled=false;
94+
set spark.sql.dialect.spark.ansi.enabled=false;

sql/core/src/test/resources/sql-tests/inputs/literals.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ select interval 1 year '12:11:10' hour to second;
144144
select interval '10-9' year to month '1' year;
145145
select interval '12:11:10' hour to second '1' year;
146146
-- malformed interval literal with ansi mode
147-
SET spark.sql.dialect.ansi.enabled=true;
147+
SET spark.sql.dialect.spark.ansi.enabled=true;
148148
select interval;
149149
select interval 1 fake_unit;
150150
select interval 1 year to month;

sql/core/src/test/resources/sql-tests/inputs/postgreSQL/text.sql

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,9 @@ select concat_ws(NULL,10,20,null,30) is null;
4646
select reverse('abcde');
4747
-- [SPARK-28036] Built-in udf left/right has inconsistent behavior
4848
-- [SPARK-28479][SPARK-28989] Parser error when enabling ANSI mode
49-
set spark.sql.dialect.ansi.enabled=false;
49+
set spark.sql.dialect.spark.ansi.enabled=false;
5050
select i, left('ahoj', i), right('ahoj', i) from range(-5, 6) t(i) order by i;
51-
set spark.sql.dialect.ansi.enabled=true;
51+
set spark.sql.dialect.spark.ansi.enabled=true;
5252
-- [SPARK-28037] Add built-in String Functions: quote_literal
5353
-- select quote_literal('');
5454
-- select quote_literal('abc''');

0 commit comments

Comments
 (0)