
[SPARK-1841]: update scalatest to version 2.1.5 #713

Closed · wants to merge 13 commits

2 changes: 1 addition & 1 deletion core/pom.xml
@@ -235,7 +235,7 @@
       </dependency>
       <dependency>
         <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
         <scope>test</scope>
       </dependency>
       <dependency>
core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -25,7 +25,7 @@ import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._

@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
     tester.assertCleanup()
 
     // Verify that shuffles can be re-executed after cleaning up
-    assert(rdd.collect().toList === collected)
+    assert(rdd.collect().toList.equals(collected))
   }
 
   test("cleanup broadcast") {
@@ -285,7 +285,7 @@ class CleanerTester(
   sc.cleaner.get.attachListener(cleanerListener)
 
   /** Assert that all the stuff has been cleaned up */
-  def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
     try {
       eventually(waitTimeout, interval(100 millis)) {
         assert(isAllCleanedUp)
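Note on the assertCleanup() signature change above: ScalaTest 2.x moved the Timeout type consumed by Eventually into its parent trait, PatienceConfiguration, so the implicit parameter must now be typed as PatienceConfiguration.Timeout. A minimal sketch of the 2.x shape, assuming only ScalaTest 2.1.5 on the classpath (suite name and durations are illustrative, not Spark's):

    import scala.language.postfixOps

    import org.scalatest.FunSuite
    import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
    import org.scalatest.concurrent.Eventually._
    import org.scalatest.time.SpanSugar._

    class CleanupTimeoutSketch extends FunSuite {
      // Same shape as assertCleanup(): the caller supplies the timeout as a
      // PatienceConfiguration.Timeout, built with Eventually's timeout(...) factory.
      def awaitCondition()(implicit waitTimeout: PatienceConfiguration.Timeout) {
        eventually(waitTimeout, interval(100 millis)) {
          assert(1 + 1 === 2)
        }
      }

      test("eventually accepts the 2.x Timeout type") {
        awaitCondition()(timeout(10 seconds))
      }
    }
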
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {

   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
 
-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }
 
-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
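The beforeAll()/afterAll() rewrite above follows from ScalaTest 2.x's BeforeAndAfterAll, which no longer passes a plain Map[String, Any] to these hooks (the config-map variant now takes a ConfigMap). Suites that ignore the config map are simplest written against the parameterless overloads. A minimal sketch of the 2.x-compatible shape; the property name is illustrative, not Spark's:

    import org.scalatest.{BeforeAndAfterAll, FunSuite}

    class SystemFlagSketch extends FunSuite with BeforeAndAfterAll {
      override def beforeAll() {
        // Runs once, before the first test in this suite.
        System.setProperty("example.use.netty", "true")
      }

      override def afterAll() {
        // Runs once, after the last test, even when tests fail.
        System.clearProperty("example.use.netty")
      }

      test("the flag is visible while the suite runs") {
        assert(System.getProperty("example.use.netty") === "true")
      }
    }
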
5 changes: 3 additions & 2 deletions core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -266,8 +266,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {

     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)
 
     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
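This hoist, like the isEquals/isContain rewrites elsewhere in the PR, pulls a multi-line boolean out of assert(...). The motivation is inferred from the pattern rather than stated in the PR: assert became a macro in ScalaTest 2.x, which inspects the asserted expression to build its failure message, and binding the result to a val first hands the macro a trivial expression. A sketch of the pattern with stand-in data:

    import org.scalatest.FunSuite

    class HoistedAssertSketch extends FunSuite {
      test("bind a sprawling condition before asserting it") {
        val numbers = (1 to 100).toSet
        // Same condition either way; the assert macro now sees only `isEquals`.
        val isEquals = numbers.map(_ + 100).
          map(_ - 100) == (1 to 100).toSet
        assert(isEquals)
      }
    }
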
core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls

 import akka.actor._
 import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}
 
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
   }
 }
 
-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
   with ImplicitSender with BeforeAndAfter with LocalSparkContext {
 
   val conf = new SparkConf
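The FunSuite → FunSuiteLike switch is forced by a structural change in ScalaTest 2.x: each test style now ships as a class (FunSuite) whose behavior lives in a companion trait (FunSuiteLike). DAGSchedulerSuite already extends the TestKit class, and a Scala class can have only one superclass, so the trait must be mixed in instead; the ParquetQuerySuite change further down is the same situation with QueryTest as the superclass. A minimal sketch of the constraint, with a stand-in base class:

    import org.scalatest.FunSuiteLike

    class SomeBase  // stands in for a class such as akka.testkit.TestKit

    // class MySuite extends SomeBase with FunSuite  // 2.x: rejected, since
    //                                               // FunSuite is now a class
    class MySuite extends SomeBase with FunSuiteLike {
      test("a base class plus the style trait compiles") {
        assert(true)
      }
    }
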
core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite {
map("k1") = strongRef
map("k2") = "v2"
map("k3") = "v3"
assert(map("k1") === strongRef)
val isEquals = map("k1") == strongRef
assert(isEquals)

// clear strong reference to "k1"
strongRef = null
15 changes: 11 additions & 4 deletions pom.xml
@@ -459,25 +459,31 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.1.5</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.scalacheck</groupId>
         <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
@@ -779,6 +785,7 @@
             <arg>-unchecked</arg>
             <arg>-deprecation</arg>
             <arg>-feature</arg>
+            <arg>-language:postfixOps</arg>
           </args>
           <jvmArgs>
             <jvmArg>-Xms1024m</jvmArg>
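Two of the pom.xml additions support the upgrade rather than bump versions. JUnit is now declared explicitly, presumably because it no longer arrives transitively after the test-dependency changes. And -language:postfixOps silences Scala 2.10's feature warning for postfix method calls, which these test sources use pervasively through ScalaTest's time DSL (the `100 millis` in ContextCleanerSuite above, for example). A sketch of the syntax the flag enables; without the flag or the equivalent import, scalac 2.10 warns:

    import scala.language.postfixOps  // source-level equivalent of -language:postfixOps
    import org.scalatest.time.SpanSugar._

    object PostfixSketch {
      def main(args: Array[String]) {
        // `millis` is invoked postfix: no dot and no argument list.
        val pause = 100 millis;
        println(pause)  // an org.scalatest.time.Span of 100 milliseconds
      }
    }
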
22 changes: 11 additions & 11 deletions project/SparkBuild.scala
@@ -270,16 +270,17 @@ object SparkBuild extends Build {
    */
 
     libraryDependencies ++= Seq(
-        "io.netty" % "netty-all" % "4.0.17.Final",
-        "org.eclipse.jetty" % "jetty-server" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-util" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-security" % jettyVersion,
-        "org.scalatest" %% "scalatest" % "1.9.1" % "test",
-        "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
-        "com.novocode" % "junit-interface" % "0.10" % "test",
-        "org.easymock" % "easymock" % "3.1" % "test",
-        "org.mockito" % "mockito-all" % "1.8.5" % "test"
+      "io.netty" % "netty-all" % "4.0.17.Final",
+      "org.eclipse.jetty" % "jetty-server" % jettyVersion,
+      "org.eclipse.jetty" % "jetty-util" % jettyVersion,
+      "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
+      "org.eclipse.jetty" % "jetty-security" % jettyVersion,
+      "org.scalatest" %% "scalatest" % "2.1.5" % "test",
+      "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
+      "com.novocode" % "junit-interface" % "0.10" % "test",
+      "org.easymock" % "easymockclassextension" % "3.1" % "test",
+      "org.mockito" % "mockito-all" % "1.9.0" % "test",
+      "junit" % "junit" % "4.10" % "test"
     ),
 
     testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -478,7 +479,6 @@ object SparkBuild extends Build {
       // this non-deterministically. TODO: FIX THIS.
       parallelExecution in Test := false,
       libraryDependencies ++= Seq(
-        "org.scalatest" %% "scalatest" % "1.9.1" % "test",
         "com.typesafe" %% "scalalogging-slf4j" % "1.0.1"
       )
     )
6 changes: 4 additions & 2 deletions repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -67,12 +67,14 @@ class ReplSuite extends FunSuite {
   }
 
   def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
       "Interpreter output did not contain '" + message + "':\n" + output)
   }
 
   def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
       "Interpreter output contained '" + message + "':\n" + output)
   }

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -17,7 +17,7 @@

 package org.apache.spark.sql.parquet
 
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}
 
 import org.apache.hadoop.fs.{Path, FileSystem}
 import org.apache.hadoop.mapreduce.Job
@@ -56,7 +56,7 @@ case class OptionalReflectData(
     doubleField: Option[Double],
     booleanField: Option[Boolean])
 
-class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
+class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
   import TestData._
   TestData // Load test data tables.

streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -92,9 +92,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 5)
     assert(third.size === 5)
 
-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
 
   test("repartition (fewer partitions)") {
@@ -111,9 +111,9 @@
     assert(second.size === 2)
     assert(third.size === 2)
 
-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
 
   test("groupByKey") {
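A closing note on the === to equals swaps in this suite (and the .equals(collected) change in ContextCleanerSuite): both spellings test the same value equality, so the assertions' meaning is unchanged. What differs is the machinery: under ScalaTest 2.x, === resolves an implicit Equality[T] and reports both operands on failure, while equals is plain Scala equality with a terser failure message. The PR appears to prefer plain equality where the 2.x === resolution caused friction during the upgrade; that reading is inferred from the diff, not stated in it. A sketch of the two spellings side by side:

    import org.scalatest.FunSuite

    class EqualitySketch extends FunSuite {
      test("equals and === check the same condition") {
        val computed = (1 to 100).toSet

        // Plain Scala equality: a Boolean, generic failure output.
        assert(computed.equals((1 to 100).toSet))

        // ScalaTest ===: routed through Equality[Set[Int]] in 2.x and
        // reports both operands when the assertion fails.
        assert(computed === (1 to 100).toSet)
      }
    }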