Commit 79f94f5

fix tests
1 parent e112af9 commit 79f94f5

File tree

2 files changed: +24 additions, -20 deletions


external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriter.scala

Lines changed: 6 additions & 6 deletions

@@ -50,7 +50,7 @@ private[kafka010] object KafkaWriter extends Logging {
       topic: Option[String] = None): Unit = {
     schema.find(_.name == TOPIC_ATTRIBUTE_NAME).getOrElse(
       if (topic.isEmpty) {
-        throw new AnalysisException(s"topic option required when no " +
+        throw new IllegalArgumentException(s"topic option required when no " +
           s"'$TOPIC_ATTRIBUTE_NAME' attribute is present. Use the " +
           s"${KafkaSourceProvider.TOPIC_OPTION_KEY} option for setting a topic.")
       } else {
@@ -59,22 +59,22 @@ private[kafka010] object KafkaWriter extends Logging {
     ).dataType match {
       case StringType => // good
       case _ =>
-        throw new AnalysisException(s"Topic type must be a ${StringType.catalogString}")
+        throw new IllegalArgumentException(s"Topic type must be a ${StringType.catalogString}")
     }
     schema.find(_.name == KEY_ATTRIBUTE_NAME).getOrElse(
       Literal(null, StringType)
     ).dataType match {
       case StringType | BinaryType => // good
       case _ =>
-        throw new AnalysisException(s"$KEY_ATTRIBUTE_NAME attribute type " +
+        throw new IllegalArgumentException(s"$KEY_ATTRIBUTE_NAME attribute type " +
           s"must be a ${StringType.catalogString} or ${BinaryType.catalogString}")
     }
     schema.find(_.name == VALUE_ATTRIBUTE_NAME).getOrElse(
-      throw new AnalysisException(s"Required attribute '$VALUE_ATTRIBUTE_NAME' not found")
+      throw new IllegalArgumentException(s"Required attribute '$VALUE_ATTRIBUTE_NAME' not found")
     ).dataType match {
       case StringType | BinaryType => // good
       case _ =>
-        throw new AnalysisException(s"$VALUE_ATTRIBUTE_NAME attribute type " +
+        throw new IllegalArgumentException(s"$VALUE_ATTRIBUTE_NAME attribute type " +
           s"must be a ${StringType.catalogString} or ${BinaryType.catalogString}")
     }
     schema.find(_.name == HEADERS_ATTRIBUTE_NAME).getOrElse(
@@ -83,7 +83,7 @@ private[kafka010] object KafkaWriter extends Logging {
     ).dataType match {
       case KafkaRecordToRowConverter.headersType => // good
       case _ =>
-        throw new AnalysisException(s"$HEADERS_ATTRIBUTE_NAME attribute type " +
+        throw new IllegalArgumentException(s"$HEADERS_ATTRIBUTE_NAME attribute type " +
           s"must be a ${KafkaRecordToRowConverter.headersType.catalogString}")
     }
   }
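
The validation above now fails with IllegalArgumentException instead of AnalysisException. Because the check runs while the continuous query is executing (see the test changes below), the error reaches callers wrapped in a StreamingQueryException rather than failing at analysis time. Below is a minimal sketch of the same lookup-and-check pattern; the object name, the hard-coded "value" column, and the message wording are illustrative, not the exact KafkaWriter code:

import org.apache.spark.sql.types.{BinaryType, StringType, StructType}

// Sketch: resolve a required writer column in the schema and fail fast with
// IllegalArgumentException when it is missing or has an unsupported type.
object WriterValidationSketch {
  def requireStringOrBinary(schema: StructType, name: String): Unit = {
    val field = schema.find(_.name == name).getOrElse(
      throw new IllegalArgumentException(s"Required attribute '$name' not found"))
    field.dataType match {
      case StringType | BinaryType => // good
      case other => throw new IllegalArgumentException(
        s"$name attribute type must be a string or binary, got ${other.catalogString}")
    }
  }
}

For example, WriterValidationSketch.requireStringOrBinary(df.schema, "value") mirrors the VALUE_ATTRIBUTE_NAME branch above.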

external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaContinuousSinkSuite.scala

Lines changed: 18 additions & 14 deletions

@@ -25,6 +25,8 @@ import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, SpecificInternalRow, UnsafeProjection}
+import org.apache.spark.sql.execution.streaming.MemoryStream
+import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream
 import org.apache.spark.sql.streaming._
 import org.apache.spark.sql.types.{BinaryType, DataType}
 import org.apache.spark.util.Utils
@@ -215,6 +217,7 @@ class KafkaContinuousSinkSuite extends KafkaContinuousTest {
   test("streaming - write data with bad schema") {
     val inputTopic = newTopic()
     testUtils.createTopic(inputTopic, partitions = 1)
+    testUtils.sendMessages(inputTopic, Array("0"))
 
     val input = spark
       .readStream
@@ -226,21 +229,21 @@ class KafkaContinuousSinkSuite extends KafkaContinuousTest {
     val topic = newTopic()
     testUtils.createTopic(topic)
 
-    val ex = intercept[AnalysisException] {
+    val ex = intercept[StreamingQueryException] {
       /* No topic field or topic option */
       createKafkaWriter(input.toDF())(
         withSelectExpr = "value as key", "value"
-      )
+      ).processAllAvailable()
     }
     assert(ex.getMessage
       .toLowerCase(Locale.ROOT)
       .contains("topic option required when no 'topic' attribute is present"))
 
-    val ex2 = intercept[AnalysisException] {
+    val ex2 = intercept[StreamingQueryException] {
       /* No value field */
       createKafkaWriter(input.toDF())(
         withSelectExpr = s"'$topic' as topic", "value as key"
-      )
+      ).processAllAvailable()
     }
     assert(ex2.getMessage.toLowerCase(Locale.ROOT).contains(
       "required attribute 'value' not found"))
@@ -249,6 +252,7 @@ class KafkaContinuousSinkSuite extends KafkaContinuousTest {
   test("streaming - write data with valid schema but wrong types") {
     val inputTopic = newTopic()
     testUtils.createTopic(inputTopic, partitions = 1)
+    testUtils.sendMessages(inputTopic, Array("0"))
 
     val input = spark
       .readStream
@@ -261,28 +265,28 @@ class KafkaContinuousSinkSuite extends KafkaContinuousTest {
     val topic = newTopic()
     testUtils.createTopic(topic)
 
-    val ex = intercept[AnalysisException] {
+    val ex = intercept[StreamingQueryException] {
       /* topic field wrong type */
       createKafkaWriter(input.toDF())(
         withSelectExpr = s"CAST('1' as INT) as topic", "value"
-      )
+      ).processAllAvailable()
     }
     assert(ex.getMessage.toLowerCase(Locale.ROOT).contains("topic type must be a string"))
 
-    val ex2 = intercept[AnalysisException] {
+    val ex2 = intercept[StreamingQueryException] {
       /* value field wrong type */
       createKafkaWriter(input.toDF())(
         withSelectExpr = s"'$topic' as topic", "CAST(value as INT) as value"
-      )
+      ).processAllAvailable()
     }
     assert(ex2.getMessage.toLowerCase(Locale.ROOT).contains(
       "value attribute type must be a string or binary"))
 
-    val ex3 = intercept[AnalysisException] {
+    val ex3 = intercept[StreamingQueryException] {
       /* key field wrong type */
       createKafkaWriter(input.toDF())(
         withSelectExpr = s"'$topic' as topic", "CAST(value as INT) as key", "value"
-      )
+      ).processAllAvailable()
     }
     assert(ex3.getMessage.toLowerCase(Locale.ROOT).contains(
       "key attribute type must be a string or binary"))
@@ -330,18 +334,18 @@ class KafkaContinuousSinkSuite extends KafkaContinuousTest {
       .option("subscribe", inputTopic)
       .load()
 
-    val ex = intercept[IllegalArgumentException] {
+    val ex = intercept[StreamingQueryException] {
       createKafkaWriter(
         input.toDF(),
-        withOptions = Map("kafka.key.serializer" -> "foo"))()
+        withOptions = Map("kafka.key.serializer" -> "foo"))().processAllAvailable()
     }
     assert(ex.getMessage.toLowerCase(Locale.ROOT).contains(
       "kafka option 'key.serializer' is not supported"))
 
-    val ex2 = intercept[IllegalArgumentException] {
+    val ex2 = intercept[StreamingQueryException] {
       createKafkaWriter(
         input.toDF(),
-        withOptions = Map("kafka.value.serializer" -> "foo"))()
+        withOptions = Map("kafka.value.serializer" -> "foo"))().processAllAvailable()
     }
     assert(ex2.getMessage.toLowerCase(Locale.ROOT).contains(
       "kafka option 'value.serializer' is not supported"))
