4 files changed, +25 −19 lines changed:

catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog
main/scala/org/apache/spark/sql
test/scala/org/apache/spark/sql/sources
hive/src/test/scala/org/apache/spark/sql/sources
catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog

@@ -135,8 +135,9 @@ case class BucketSpec(
     numBuckets: Int,
     bucketColumnNames: Seq[String],
     sortColumnNames: Seq[String]) {
-  if (numBuckets <= 0) {
-    throw new AnalysisException(s"Expected positive number of buckets, but got `$numBuckets`.")
+  if (numBuckets <= 0 || numBuckets >= 100000) {
+    throw new AnalysisException(
+      s"Number of buckets should be greater than 0 but less than 100000. Got `$numBuckets`")
   }

   override def toString: String = {
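A minimal sketch (mine, not part of the diff) of what this relocation buys: the range check now lives in the BucketSpec constructor body, so every code path that builds a spec is validated identically. The import path follows the file location above; the column names are illustrative.

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.BucketSpec

object BucketSpecCheck extends App {
  // In range: 0 < 8 < 100000, so construction succeeds.
  val ok = BucketSpec(8, Seq("i"), Seq("j"))

  // Out of range: the upper bound is exclusive, so 100000 is rejected
  // at construction time, regardless of which API built the spec.
  try {
    BucketSpec(100000, Seq("i"), Nil)
  } catch {
    case e: AnalysisException =>
      // Number of buckets should be greater than 0 but less than 100000. Got `100000`
      println(e.getMessage)
  }
}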
main/scala/org/apache/spark/sql

@@ -275,7 +275,6 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   }

   numBuckets.map { n =>
-    require(n > 0 && n < 100000, "Bucket number must be greater than 0 and less than 100000.")
     BucketSpec(n, bucketColumnNames.get, sortColumnNames.getOrElse(Nil))
   }
 }
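A sketch of the user-visible effect of dropping this require, written in the suites' ScalaTest style (assumes an active SparkSession in scope as `spark`, plus `intercept`/`assert` from ScalaTest, as in the tests below): DataFrameWriter no longer performs its own bound check, so an out-of-range count from the DataFrame API now surfaces as the AnalysisException thrown by BucketSpec rather than an IllegalArgumentException.

import org.apache.spark.sql.AnalysisException
import spark.implicits._

val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")

// Before this change: IllegalArgumentException from the removed require.
// After it: AnalysisException from BucketSpec, with the unified message.
val e = intercept[AnalysisException](df.write.bucketBy(0, "i").saveAsTable("tt"))
assert(e.getMessage.contains("Number of buckets should be greater than 0 but less than 100000"))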
test/scala/org/apache/spark/sql/sources

@@ -206,7 +206,7 @@ class CreateTableAsSelectSuite
     }
   }

-  test("create table using as select - with non-zero buckets") {
+  test("create table using as select - with valid number of buckets") {
     val catalog = spark.sessionState.catalog
     withTable("t") {
       sql(
@@ -222,19 +222,21 @@ class CreateTableAsSelectSuite
     }
   }

-  test("create table using as select - with zero buckets") {
+  test("create table using as select - with invalid number of buckets") {
     withTable("t") {
-      val e = intercept[AnalysisException] {
-        sql(
-          s"""
-             |CREATE TABLE t USING PARQUET
-             |OPTIONS (PATH '${path.toURI}')
-             |CLUSTERED BY (a) SORTED BY (b) INTO 0 BUCKETS
-             |AS SELECT 1 AS a, 2 AS b
-           """.stripMargin
-        )
-      }.getMessage
-      assert(e.contains("Expected positive number of buckets, but got `0`"))
+      Seq(0, 100000).foreach(numBuckets => {
+        val e = intercept[AnalysisException] {
+          sql(
+            s"""
+               |CREATE TABLE t USING PARQUET
+               |OPTIONS (PATH '${path.toURI}')
+               |CLUSTERED BY (a) SORTED BY (b) INTO $numBuckets BUCKETS
+               |AS SELECT 1 AS a, 2 AS b
+             """.stripMargin
+          )
+        }.getMessage
+        assert(e.contains("Number of buckets should be greater than 0 but less than 100000"))
+      })
     }
   }
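For contrast with the invalid cases above, a sketch of what the renamed valid-buckets test plausibly does (its statement body is elided in the first CreateTableAsSelectSuite hunk; the bucket count of 5 and the catalog assertion here are illustrative, reusing the path, columns, and helpers visible in the invalid-case test):

import org.apache.spark.sql.catalyst.TableIdentifier

withTable("t") {
  sql(
    s"""
       |CREATE TABLE t USING PARQUET
       |OPTIONS (PATH '${path.toURI}')
       |CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS
       |AS SELECT 1 AS a, 2 AS b
     """.stripMargin
  )
  // An in-range spec is recorded in the catalog rather than rejected.
  val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
  assert(table.bucketSpec.exists(_.numBuckets == 5))
}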
hive/src/test/scala/org/apache/spark/sql/sources

@@ -38,10 +38,14 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     intercept[AnalysisException](df.write.bucketBy(2, "k").saveAsTable("tt"))
   }

-  test("numBuckets not greater than 0 or less than 100000") {
+  test("numBuckets be greater than 0 but less than 100000") {
     val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
-    intercept[IllegalArgumentException](df.write.bucketBy(0, "i").saveAsTable("tt"))
-    intercept[IllegalArgumentException](df.write.bucketBy(100000, "i").saveAsTable("tt"))
+
+    Seq(-1, 0, 100000).foreach(numBuckets => {
+      val e = intercept[AnalysisException](df.write.bucketBy(numBuckets, "i").saveAsTable("tt"))
+      assert(
+        e.getMessage.contains("Number of buckets should be greater than 0 but less than 100000"))
+    })
   }

   test("specify sorting columns without bucketing columns") {