Skip to content

Commit 1614933

Browse files
committed
Optimize code.
1 parent 97c1c73 commit 1614933

File tree

3 files changed

+32
-8
lines changed

3 files changed

+32
-8
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ abstract class LikeAllBase extends UnaryExpression with ImplicitCastInputTypes w
187187

188188
protected def patterns: Seq[UTF8String]
189189

190-
protected def isNotDefined: Boolean
190+
protected def isNotLikeAll: Boolean
191191

192192
override def inputTypes: Seq[DataType] = StringType :: Nil
193193

@@ -205,7 +205,7 @@ abstract class LikeAllBase extends UnaryExpression with ImplicitCastInputTypes w
205205
if (exprValue == null) {
206206
null
207207
} else {
208-
val allMatched = if (isNotDefined) {
208+
val allMatched = if (isNotLikeAll) {
209209
!cache.exists(p => p.matcher(exprValue.toString).matches())
210210
} else {
211211
cache.forall(p => p.matcher(exprValue.toString).matches())
@@ -226,10 +226,9 @@ abstract class LikeAllBase extends UnaryExpression with ImplicitCastInputTypes w
226226
val allMatched = ctx.freshName("allMatched")
227227
val valueIsNull = ctx.freshName("valueIsNull")
228228
val valueArg = ctx.freshName("valueArg")
229-
val patternHasNull = ctx.addReferenceObj("hasNull", hasNull)
230229
val patternCache = ctx.addReferenceObj("patternCache", cache.asJava)
231230

232-
val matchCode = if (isNotDefined) {
231+
val matchCode = if (isNotLikeAll) {
233232
s"$pattern.matcher($valueArg.toString()).matches()"
234233
} else {
235234
s"!$pattern.matcher($valueArg.toString()).matches()"
@@ -250,18 +249,18 @@ abstract class LikeAllBase extends UnaryExpression with ImplicitCastInputTypes w
250249
| }
251250
| }
252251
|}
253-
|final boolean ${ev.isNull} = $valueIsNull || ($allMatched && $patternHasNull);
252+
|final boolean ${ev.isNull} = $valueIsNull || ($allMatched && $hasNull);
254253
|final boolean ${ev.value} = $allMatched;
255254
""".stripMargin)
256255
}
257256
}
258257

259258
/**
 * `expr LIKE ALL (pattern1, pattern2, ...)` — evaluates to true only when the
 * child expression matches every pattern (matching logic lives in [[LikeAllBase]]).
 *
 * NOTE(review): the scraped diff span contained both the pre-rename
 * (`isNotDefined`) and post-rename (`isNotLikeAll`) override; this keeps the
 * post-rename version, which is what the commit introduces.
 */
case class LikeAll(child: Expression, patterns: Seq[UTF8String]) extends LikeAllBase {
  // Positive form: all patterns must match.
  override def isNotLikeAll: Boolean = false
}
262261

263262
/**
 * `expr NOT LIKE ALL (pattern1, pattern2, ...)` — evaluates to true only when
 * the child expression matches none of the patterns (see [[LikeAllBase]]).
 *
 * NOTE(review): the scraped diff span contained both the pre-rename
 * (`isNotDefined`) and post-rename (`isNotLikeAll`) override; this keeps the
 * post-rename version, which is what the commit introduces.
 */
case class NotLikeAll(child: Expression, patterns: Seq[UTF8String]) extends LikeAllBase {
  // Negated form: no pattern may match.
  override def isNotLikeAll: Boolean = true
}
266265

267266
// scalastyle:off line.contains.tab

sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -563,7 +563,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
563563
// Filter out test files with invalid extensions such as temp files created
564564
// by vi (.swp), Mac (.DS_Store) etc.
565565
val filteredFiles = files.filter(_.getName.endsWith(validFileExtensions))
566-
filteredFiles ++ dirs.flatMap(listFilesRecursively)
566+
(filteredFiles ++ dirs.flatMap(listFilesRecursively))
567+
.filter(_.getName.equals("window_part1.sql"))
567568
}
568569

569570
/** Load built-in test tables into the SparkSession. */

sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1033,6 +1033,30 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSparkSession with
10331033
}
10341034
}
10351035

1036+
// NOTE(review): looks like a leftover debug test — meaningless name, the created
// table is never queried, and there is no withTable cleanup. Confirm before merge.
test("abc2") {
  spark.sql("create table SPARK_33045(id string) using parquet")
  // Build a concat_ws call with ~90k integer arguments to stress analysis/codegen.
  val args = (1 until 90000).mkString(", ")
  spark.sql(s"select concat_ws($args)").show
}
1041+
1042+
// NOTE(review): looks like a leftover debug test — meaningless name, no withTable
// cleanup, and it re-creates SPARK_33045 (fails if "abc2"/"abc" ran first). Confirm.
test("abc1") {
  spark.sql("create table SPARK_33045(id string) using parquet")
  // IN-list with ~9k literals plus a column reference, to stress the analyzer.
  val inList = (1 until 9000).mkString(", ")
  spark.sql(s"select * from SPARK_33045 where id in ($inList, id)").show
}
1047+
1048+
// NOTE(review): looks like a leftover debug test — meaningless name, no withTable
// cleanup, and it re-creates SPARK_33045 (fails if a sibling test ran first). Confirm.
test("abc") {
  spark.sql("create table SPARK_33045(id string) using parquet")
  // LIKE ALL with ~9k patterns, presumably to exercise the LikeAllBase codegen path.
  val patternList = (1 until 9000).mkString(", ")
  spark.sql(s"select * from SPARK_33045 where id like all ($patternList)").show
}
1053+
1054+
// NOTE(review): debug-style test — no withTable cleanup, and it re-creates
// SPARK_33045 with a different schema than the sibling tests. Confirm before merge.
test("concat") {
  spark.sql("create table SPARK_33045(id int) using parquet")
  // concat with ~900 literal arguments plus a column, to stress expression codegen.
  val concatArgs = (1 until 900).mkString(", ")
  spark.sql(s"select concat($concatArgs, id) from SPARK_33045").show
}
1059+
10361060
test("Insert overwrite table command should output correct schema: basic") {
10371061
withTable("tbl", "tbl2") {
10381062
withView("view1") {

0 commit comments

Comments
 (0)