Skip to content

Commit

Permalink
Spark: Fix failing Spark streaming rate limit unit test (apache#7470)
Browse files Browse the repository at this point in the history
Co-authored-by: Prashant Singh <psinghvk@amazon.com>
  • Loading branch information
singhpk234 and Prashant Singh authored Apr 29, 2023
1 parent 8c5605d commit 251c9fa
Showing 1 changed file with 8 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,15 @@
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.internal.SQLConf;
import org.apache.spark.sql.streaming.DataStreamWriter;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
Expand Down Expand Up @@ -110,6 +112,12 @@ public TestStructuredStreamingRead3(
Lists.newArrayList(
new SimpleRecord(15, "fifteen"), new SimpleRecord(16, "sixteen"))));

@BeforeClass
public static void setupSpark() {
  // Adaptive Query Execution may coalesce shuffle partitions at runtime, which
  // changes how many files a write produces; these tests assert on exact file
  // counts, so AQE is switched off once for the whole test class.
  String aqeFlag = SQLConf.ADAPTIVE_EXECUTION_ENABLED().key();
  spark.conf().set(aqeFlag, "false");
}

@Before
public void setupTable() {
sql(
Expand Down

0 comments on commit 251c9fa

Please sign in to comment.