
Commit 08999f9

Code review
1 parent 6abf844 commit 08999f9

7 files changed, +67 -59 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 0 additions & 1 deletion
@@ -304,7 +304,6 @@ object FunctionRegistry {
     expression[AnyAgg]("any"),
     expression[SomeAgg]("some"),
 
-
     // string functions
     expression[Ascii]("ascii"),
     expression[Chr]("char"),

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 1 deletion
@@ -289,7 +289,7 @@ trait RuntimeReplaceable extends UnaryExpression with Unevaluable {
  * with other databases. For example, we use this to support every, any/some aggregates by rewriting
  * them with Min and Max respectively.
  */
-trait UnevaluableAggrgate extends DeclarativeAggregate {
+trait UnevaluableAggregate extends DeclarativeAggregate {
 
   override def nullable: Boolean = true
 
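Why Min and Max can stand in for these aggregates, as the ScalaDoc above says: booleans order false < true, so the minimum of a boolean column is true only when every value is true, and the maximum is true as soon as at least one value is true. A tiny sketch of that ordering with plain Scala collections (not Spark expressions):

    // Boolean ordering in Scala: false < true.
    val flags = Seq(true, false, true)
    assert(!flags.min)  // min is false: not every value is true ("every" semantics)
    assert(flags.max)   // max is true: at least one value is true ("any"/"some" semantics)
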
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala

Lines changed: 0 additions & 31 deletions
@@ -57,34 +57,3 @@ case class Max(child: Expression) extends DeclarativeAggregate {
 
   override lazy val evaluateExpression: AttributeReference = max
 }
-
-abstract class AnyAggBase(arg: Expression)
-  extends UnevaluableAggrgate with ImplicitCastInputTypes {
-
-  override def children: Seq[Expression] = arg :: Nil
-
-  override def dataType: DataType = BooleanType
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(BooleanType)
-
-  override def checkInputDataTypes(): TypeCheckResult = {
-    arg.dataType match {
-      case dt if dt != BooleanType =>
-        TypeCheckResult.TypeCheckFailure(s"Input to function '$prettyName' should have been " +
-          s"${BooleanType.simpleString}, but it's [${arg.dataType.catalogString}].")
-      case _ => TypeCheckResult.TypeCheckSuccess
-    }
-  }
-}
-
-@ExpressionDescription(
-  usage = "_FUNC_(expr) - Returns true if at least one value of `expr` is true.")
-case class AnyAgg(arg: Expression) extends AnyAggBase(arg) {
-  override def nodeName: String = "Any"
-}
-
-@ExpressionDescription(
-  usage = "_FUNC_(expr) - Returns true if at least one value of `expr` is true.")
-case class SomeAgg(arg: Expression) extends AnyAggBase(arg) {
-  override def nodeName: String = "Some"
-}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala

Lines changed: 0 additions & 24 deletions
@@ -57,27 +57,3 @@ case class Min(child: Expression) extends DeclarativeAggregate {
 
   override lazy val evaluateExpression: AttributeReference = min
 }
-
-@ExpressionDescription(
-  usage = "_FUNC_(expr) - Returns true if all values of `expr` are true.")
-case class EveryAgg(arg: Expression)
-  extends UnevaluableAggrgate with ImplicitCastInputTypes {
-
-  override def nodeName: String = "Every"
-
-  override def children: Seq[Expression] = arg :: Nil
-
-  override def dataType: DataType = BooleanType
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(BooleanType)
-
-  override def checkInputDataTypes(): TypeCheckResult = {
-    arg.dataType match {
-      case dt if dt != BooleanType =>
-        TypeCheckResult.TypeCheckFailure(s"Input to function '$prettyName' should have been " +
-          s"${BooleanType.simpleString}, but it's [${arg.dataType.catalogString}].")
-      case _ => TypeCheckResult.TypeCheckSuccess
-    }
-  }
-}
-

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/UnevaluableAggs.scala

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.types._
+
+abstract class UnevaluableBooleanAggBase(arg: Expression)
+  extends UnevaluableAggregate with ImplicitCastInputTypes {
+
+  override def children: Seq[Expression] = arg :: Nil
+
+  override def dataType: DataType = BooleanType
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(BooleanType)
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    arg.dataType match {
+      case dt if dt != BooleanType =>
+        TypeCheckResult.TypeCheckFailure(s"Input to function '$prettyName' should have been " +
+          s"${BooleanType.simpleString}, but it's [${arg.dataType.catalogString}].")
+      case _ => TypeCheckResult.TypeCheckSuccess
+    }
+  }
+}
+
+@ExpressionDescription(
+  usage = "_FUNC_(expr) - Returns true if all values of `expr` are true.",
+  since = "3.0.0")
+case class EveryAgg(arg: Expression) extends UnevaluableBooleanAggBase(arg) {
+  override def nodeName: String = "Every"
+}
+
+@ExpressionDescription(
+  usage = "_FUNC_(expr) - Returns true if at least one value of `expr` is true.",
+  since = "3.0.0")
+case class AnyAgg(arg: Expression) extends UnevaluableBooleanAggBase(arg) {
+  override def nodeName: String = "Any"
+}
+
+@ExpressionDescription(
+  usage = "_FUNC_(expr) - Returns true if at least one value of `expr` is true.",
+  since = "3.0.0")
+case class SomeAgg(arg: Expression) extends UnevaluableBooleanAggBase(arg) {
+  override def nodeName: String = "Some"
+}
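
Because FunctionRegistry maps these expressions to the SQL names "every", "any" and "some", they can be exercised end to end from SQL. A minimal usage sketch, assuming a Spark build that contains this commit; the table name exams and the boolean column passed are illustrative only:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local[*]").appName("boolean-aggs").getOrCreate()
    import spark.implicits._

    // Illustrative data: one boolean flag per (dept, student) pair.
    Seq(("cs", true), ("cs", true), ("math", true), ("math", false))
      .toDF("dept", "passed")
      .createOrReplaceTempView("exams")

    // every/any/some parse to EveryAgg/AnyAgg/SomeAgg and are rewritten to Min/Max before execution.
    spark.sql(
      """SELECT dept,
        |       every(passed) AS all_passed,
        |       any(passed)   AS any_passed,
        |       some(passed)  AS some_passed
        |FROM exams
        |GROUP BY dept""".stripMargin).show()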

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala

Lines changed: 3 additions & 2 deletions
@@ -32,9 +32,9 @@ import org.apache.spark.sql.types._
  * Finds all the expressions that are unevaluable and replace/rewrite them with semantically
  * equivalent expressions that can be evaluated. Currently we replace two kinds of expressions :
  * 1) [[RuntimeReplaceable]] expressions
- * 2) [[UnevaluableAggrgate]] expressions such as Every, Some, Any
+ * 2) [[UnevaluableAggregate]] expressions such as Every, Some, Any
  * This is mainly used to provide compatibility with other databases.
- * Few examples are :
+ * Few examples are:
  * we use this to support "nvl" by replacing it with "coalesce".
  * we use this to replace Every and Any with Min and Max respectively.
  */
@@ -47,6 +47,7 @@ object ReplaceExpressions extends Rule[LogicalPlan] {
   }
 }
 
+
 /**
  * Computes the current date and time to make sure we return the same result in a single query.
  */
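
The body of ReplaceExpressions itself lies outside this hunk; a minimal sketch of the kind of rewrite its ScalaDoc describes (the object name here is illustrative, not the actual rule):

    import org.apache.spark.sql.catalyst.expressions.RuntimeReplaceable
    import org.apache.spark.sql.catalyst.expressions.aggregate.{AnyAgg, EveryAgg, Max, Min, SomeAgg}
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.catalyst.rules.Rule

    // Sketch: swap unevaluable expressions for semantically equivalent, evaluable ones.
    object ReplaceUnevaluableExpressionsSketch extends Rule[LogicalPlan] {
      def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
        case e: RuntimeReplaceable => e.child   // e.g. nvl(a, b) becomes coalesce(a, b)
        case EveryAgg(arg)         => Min(arg)  // every(b) -> min(b)
        case AnyAgg(arg)           => Max(arg)  // any(b)   -> max(b)
        case SomeAgg(arg)          => Max(arg)  // some(b)  -> max(b)
      }
    }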

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala

Lines changed: 1 addition & 0 deletions
@@ -146,6 +146,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite {
     assertSuccess(Min('arrayField))
     assertSuccess(new EveryAgg('booleanField))
     assertSuccess(new AnyAgg('booleanField))
+    assertSuccess(new SomeAgg('booleanField))
 
     assertError(Min('mapField), "min does not support ordering on type")
     assertError(Max('mapField), "max does not support ordering on type")
