revert [SPARK-10030] Use tags to control which tests to run depending on changes being tested #20310


Closed
wants to merge 1 commit into from
26 changes: 0 additions & 26 deletions common/tags/src/test/java/org/apache/spark/tags/DockerTest.java

This file was deleted.


11 changes: 0 additions & 11 deletions core/pom.xml
@@ -359,17 +359,6 @@
<scope>test</scope>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-crypto</artifactId>
17 changes: 2 additions & 15 deletions dev/run-tests.py
@@ -124,14 +124,6 @@ def determine_modules_to_test(changed_modules):
{m: set(m.dependencies).intersection(modules_to_test) for m in modules_to_test}, sort=True)


def determine_tags_to_exclude(changed_modules):
tags = []
for m in modules.all_modules:
if m not in changed_modules:
tags += m.test_tags
return tags


# -------------------------------------------------------------------------------------------------
# Functions for working with subprocesses and shell tools
# -------------------------------------------------------------------------------------------------
@@ -423,7 +415,7 @@ def run_scala_tests_sbt(test_modules, test_profiles):
exec_sbt(profiles_and_goals)


def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags):
def run_scala_tests(build_tool, hadoop_version, test_modules):
"""Function to properly execute all tests passed in as a set from the
`determine_test_suites` function"""
set_title_and_block("Running Spark unit tests", "BLOCK_SPARK_UNIT_TESTS")
@@ -433,9 +425,6 @@ def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags):
test_profiles = get_hadoop_profiles(hadoop_version) + \
list(set(itertools.chain.from_iterable(m.build_profile_flags for m in test_modules)))

if excluded_tags:
test_profiles += ['-Dtest.exclude.tags=' + ",".join(excluded_tags)]

if build_tool == "maven":
run_scala_tests_maven(test_profiles)
else:
@@ -544,10 +533,8 @@ def main():
target_branch = os.environ["ghprbTargetBranch"]
changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch)
changed_modules = determine_modules_for_files(changed_files)
excluded_tags = determine_tags_to_exclude(changed_modules)
if not changed_modules:
changed_modules = [modules.root]
excluded_tags = []
print("[info] Found the following changed modules:",
", ".join(x.name for x in changed_modules))

@@ -601,7 +588,7 @@ def main():
build_spark_assembly_sbt(hadoop_version)

# run the test suites
run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags)
run_scala_tests(build_tool, hadoop_version, test_modules)

modules_with_python_tests = [m for m in test_modules if m.python_test_goals]
if modules_with_python_tests:
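
For readers skimming the deleted hunks above: before this revert, dev/run-tests.py gathered the test tags of every module that a change did not touch and forwarded them to the build as an exclusion flag. Below is a minimal, self-contained sketch of that mechanism, not the actual script; the Module class and the module list are simplified stand-ins, while the tag names and the -Dtest.exclude.tags flag come straight from the deleted lines.

# Simplified reconstruction of the tag-exclusion flow removed by this revert.
# Only the shape of the logic is kept; the module metadata is illustrative.

class Module:
    def __init__(self, name, test_tags=()):
        self.name = name
        self.test_tags = list(test_tags)


# Stand-ins for the definitions in dev/sparktestsupport/modules.py.
all_modules = [
    Module("hive", test_tags=["org.apache.spark.tags.ExtendedHiveTest"]),
    Module("yarn", test_tags=["org.apache.spark.tags.ExtendedYarnTest"]),
    Module("core"),
]


def determine_tags_to_exclude(changed_modules):
    # Tags belonging to modules that were NOT changed get excluded, so their
    # slow or environment-specific suites are skipped for unrelated changes.
    tags = []
    for m in all_modules:
        if m not in changed_modules:
            tags += m.test_tags
    return tags


def scala_test_profiles(changed_modules):
    excluded_tags = determine_tags_to_exclude(changed_modules)
    profiles = []
    if excluded_tags:
        # Forwarded to the Maven/SBT test invocation so suites annotated with
        # these tags are not run.
        profiles.append("-Dtest.exclude.tags=" + ",".join(excluded_tags))
    return profiles


if __name__ == "__main__":
    changed = [m for m in all_modules if m.name == "core"]
    print(scala_test_profiles(changed))
    # ['-Dtest.exclude.tags=org.apache.spark.tags.ExtendedHiveTest,org.apache.spark.tags.ExtendedYarnTest']

After the revert, run_scala_tests no longer receives an excluded_tags argument, so no such flag is appended to the test profiles and tagged suites are treated like any other.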
11 changes: 1 addition & 10 deletions dev/sparktestsupport/modules.py
@@ -33,7 +33,7 @@ class Module(object):

def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=(), environ={},
sbt_test_goals=(), python_test_goals=(), blacklisted_python_implementations=(),
test_tags=(), should_run_r_tests=False, should_run_build_tests=False):
should_run_r_tests=False, should_run_build_tests=False):
"""
Define a new module.

@@ -52,8 +52,6 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=
:param blacklisted_python_implementations: A set of Python implementations that are not
supported by this module's Python components. The values in this set should match
strings returned by Python's `platform.python_implementation()`.
:param test_tags A set of tags that will be excluded when running unit tests if the module
is not explicitly changed.
:param should_run_r_tests: If true, changes in this module will trigger all R tests.
:param should_run_build_tests: If true, changes in this module will trigger build tests.
"""
@@ -65,7 +63,6 @@ def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=
self.environ = environ
self.python_test_goals = python_test_goals
self.blacklisted_python_implementations = blacklisted_python_implementations
self.test_tags = test_tags
self.should_run_r_tests = should_run_r_tests
self.should_run_build_tests = should_run_build_tests

@@ -137,9 +134,6 @@ def __hash__(self):
sbt_test_goals=[
"hive/test",
],
test_tags=[
"org.apache.spark.tags.ExtendedHiveTest"
]
)


@@ -523,9 +517,6 @@ def __hash__(self):
"yarn/test",
"network-yarn/test",
],
test_tags=[
"org.apache.spark.tags.ExtendedYarnTest"
]
)

mesos = Module(
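
The test_tags constructor parameter deleted above was how a module advertised the ScalaTest tag guarding its extended suites. The fragment below is a hedged sketch of what the pre-revert hive and yarn definitions looked like; the tag names and sbt_test_goals appear in the hunks here, while the dependencies and source_file_regexes values are assumptions for illustration and the remaining constructor arguments are omitted.

# Abridged pre-revert module definitions; not copied verbatim from modules.py.
hive = Module(
    name="hive",
    dependencies=[sql],                      # assumed dependency object
    source_file_regexes=["sql/hive/"],       # assumed path pattern
    sbt_test_goals=["hive/test"],
    test_tags=["org.apache.spark.tags.ExtendedHiveTest"],
)

yarn = Module(
    name="yarn",
    dependencies=[],
    source_file_regexes=["yarn/", "common/network-yarn/"],   # assumed
    sbt_test_goals=["yarn/test", "network-yarn/test"],
    test_tags=["org.apache.spark.tags.ExtendedYarnTest"],
)

With the parameter gone, a change that leaves hive or yarn untouched no longer causes the PR builder to exclude org.apache.spark.tags.ExtendedHiveTest or org.apache.spark.tags.ExtendedYarnTest; run-tests.py simply runs the selected modules' suites without tag filtering.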
6 changes: 0 additions & 6 deletions external/docker-integration-tests/pom.xml
@@ -100,12 +100,6 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
@@ -25,10 +25,8 @@ import org.scalatest.Ignore

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{BooleanType, ByteType, ShortType, StructType}
import org.apache.spark.tags.DockerTest


@DockerTest
@Ignore // AMPLab Jenkins needs to be updated before shared memory works on docker
class DB2IntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DatabaseOnDocker {
@@ -21,9 +21,6 @@ import java.math.BigDecimal
import java.sql.{Connection, Date, Timestamp}
import java.util.Properties

import org.apache.spark.tags.DockerTest

@DockerTest
class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DatabaseOnDocker {
override val imageName = "mysql:5.7.9"
@@ -26,7 +26,6 @@ import org.apache.spark.sql.execution.{RowDataSourceScanExec, WholeStageCodegenE
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.tags.DockerTest

/**
* This patch was tested using the Oracle docker. Created this integration suite for the same.
@@ -48,7 +47,6 @@ import org.apache.spark.tags.DockerTest
* All tests in this suite are ignored because of the dependency with the oracle jar from maven
* repository.
*/
@DockerTest
class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
import testImplicits._

@@ -23,9 +23,7 @@ import java.util.Properties
import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.types.{ArrayType, DecimalType, FloatType, ShortType}
import org.apache.spark.tags.DockerTest

@DockerTest
class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
override val db = new DatabaseOnDocker {
override val imageName = "postgres:9.4.5"
11 changes: 0 additions & 11 deletions external/flume-sink/pom.xml
@@ -94,17 +94,6 @@
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
12 changes: 0 additions & 12 deletions external/flume/pom.xml
@@ -69,18 +69,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
12 changes: 0 additions & 12 deletions external/kafka-0-10-sql/pom.xml
@@ -89,18 +89,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>

<build>
12 changes: 0 additions & 12 deletions external/kafka-0-10/pom.xml
@@ -74,18 +74,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>

<build>
12 changes: 0 additions & 12 deletions external/kafka-0-8/pom.xml
@@ -89,18 +89,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
12 changes: 0 additions & 12 deletions external/kinesis-asl/pom.xml
@@ -83,18 +83,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
12 changes: 0 additions & 12 deletions graphx/pom.xml
@@ -78,18 +78,6 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
</dependency>

<!--
This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude
them will yield errors.
-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>