@@ -181,9 +181,16 @@ List<String> buildClassPath(String appClassPath) throws IOException {
         }
       }
       if (isTesting) {
+        boolean isGitHubActionsBuild = System.getenv("GITHUB_ACTIONS") != null;

Review comments on the line above:

Member:
Is it inevitable? It looks a little weird to me to have GITHUB_ACTIONS in this part.

Member Author:
It is weird. I will take a fresh look tomorrow and see if there's a better way. To avoid such a band-aid fix, we should basically make a fix like #12334, and #12334 wasn't even a complete fix if I am not mistaken. At least I think it will need a big change.

         for (String project : projects) {
-          addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
-            project, scala));
+          if (isGitHubActionsBuild) {
+            // In GitHub Actions build, SBT option 'crossPaths' is disabled so the Scala version
+            // directory is not created, see SPARK-32408. This is a temporary workaround.
+            addToClassPath(cp, String.format("%s/%s/target/test-classes", sparkHome, project));
+          } else {
+            addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
+              project, scala));
+          }
         }
       }
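
For illustration, a minimal sketch (mine, not code from this PR, with hypothetical values) of the two per-project test-classes locations the builder now has to consider, depending on whether SBT's crossPaths is enabled:

```python
import os

# Hypothetical values, for illustration only.
spark_home, project, scala = "/path/to/spark", "core", "2.12"

# crossPaths enabled (default): the Scala binary version is part of the path.
with_cross_paths = os.path.join(spark_home, project, "target", "scala-" + scala, "test-classes")

# crossPaths disabled (GitHub Actions build, see SPARK-32408): no scala-<version> directory.
without_cross_paths = os.path.join(spark_home, project, "target", "test-classes")

print(with_cross_paths, without_cross_paths, sep="\n")
```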

6 changes: 4 additions & 2 deletions project/SparkBuild.scala
@@ -1035,8 +1035,10 @@ object TestSettings {
         Seq("-eNCXEHLOPQMDF")
       }.getOrElse(Nil): _*),
     testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
-    // Required to detect Junit tests for each project, see also https://github.com/sbt/junit-interface/issues/35
-    crossPaths := false,
+    // This is currently only disabled in GitHub Actions build as a temporary workaround. See SPARK-32408.
+    // It is required to detect Junit tests for each project, see also
+    // https://github.com/sbt/junit-interface/issues/35
+    crossPaths := sys.env.get("GITHUB_ACTIONS").isEmpty,
     // Enable Junit testing.
     libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test",
     // `parallelExecutionInTest` controls whether test suites belonging to the same SBT project
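
The new expression relies on GitHub Actions always exporting GITHUB_ACTIONS=true in its runners, so crossPaths evaluates to false on that CI and true everywhere else. A rough Python restatement (mine, not part of the change):

```python
import os

# crossPaths stays enabled unless the GITHUB_ACTIONS environment variable is set.
cross_paths_enabled = os.environ.get("GITHUB_ACTIONS") is None
print(cross_paths_enabled)
```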
5 changes: 3 additions & 2 deletions python/pyspark/sql/tests/test_dataframe.py
@@ -850,9 +850,10 @@ def setUpClass(cls):

         SPARK_HOME = _find_spark_home()
         filename_pattern = (
-            "sql/core/target/scala-*/test-classes/org/apache/spark/sql/"
+            "sql/core/target/**/test-classes/org/apache/spark/sql/"
             "TestQueryExecutionListener.class")
-        cls.has_listener = bool(glob.glob(os.path.join(SPARK_HOME, filename_pattern)))
+        cls.has_listener = bool(glob.glob(
+            os.path.join(SPARK_HOME, filename_pattern), recursive=True))

         if cls.has_listener:
             # Note that 'spark.sql.queryExecutionListeners' is a static immutable configuration.
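
The same pattern change appears in test_session.py below. In case it is easy to misread: with recursive=True, "**" matches zero or more directories, so the new pattern finds the class file both with and without the scala-<version> directory level. A small self-contained check (my own sketch, hypothetical paths, not part of the PR):

```python
import glob
import os
import tempfile

# Create both layouts under a throwaway directory.
root = tempfile.mkdtemp()
for layout in ("sql/core/target/test-classes",
               "sql/core/target/scala-2.12/test-classes"):
    pkg = os.path.join(root, layout, "org/apache/spark/sql")
    os.makedirs(pkg)
    open(os.path.join(pkg, "TestQueryExecutionListener.class"), "w").close()

pattern = ("sql/core/target/**/test-classes/org/apache/spark/sql/"
           "TestQueryExecutionListener.class")
# '**' can match zero directories, so both files are found.
print(sorted(glob.glob(os.path.join(root, pattern), recursive=True)))
```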
4 changes: 2 additions & 2 deletions python/pyspark/sql/tests/test_session.py
@@ -326,9 +326,9 @@ def setUpClass(cls):

         SPARK_HOME = _find_spark_home()
         filename_pattern = (
-            "sql/core/target/scala-*/test-classes/org/apache/spark/sql/"
+            "sql/core/target/**/test-classes/org/apache/spark/sql/"
             "SparkSessionExtensionSuite.class")
-        if not glob.glob(os.path.join(SPARK_HOME, filename_pattern)):
+        if not glob.glob(os.path.join(SPARK_HOME, filename_pattern), recursive=True):
             raise unittest.SkipTest(
                 "'org.apache.spark.sql.SparkSessionExtensionSuite' is not "
                 "available. Will skip the related tests.")
4 changes: 2 additions & 2 deletions python/pyspark/testing/utils.py
@@ -155,10 +155,10 @@ def search_jar(project_relative_path, sbt_jar_name_prefix, mvn_jar_name_prefix):
     # Search jar in the project dir using the jar name_prefix for both sbt build and maven
     # build because the artifact jars are in different directories.
     sbt_build = glob.glob(os.path.join(
-        project_full_path, "target/scala-*/%s*.jar" % sbt_jar_name_prefix))
+        project_full_path, "target/**/%s*.jar" % sbt_jar_name_prefix), recursive=True)
     maven_build = glob.glob(os.path.join(
         project_full_path, "target/%s*.jar" % mvn_jar_name_prefix))
-    jar_paths = sbt_build + maven_build
+    jar_paths = set(sbt_build + maven_build)
     jars = [jar for jar in jar_paths if not jar.endswith(ignored_jar_suffixes)]

     if not jars:
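
One subtlety worth spelling out (my reading of the change, not stated in the diff): because "**" also matches zero directories, the widened sbt glob now picks up jars sitting directly under target/, i.e. the same files the Maven glob returns, and the switch to set() is what keeps those duplicates out of jar_paths. A minimal sketch with a hypothetical jar name:

```python
import glob
import os
import tempfile

# One Maven-style jar directly under target/, in a throwaway project directory.
project_full_path = tempfile.mkdtemp()
os.makedirs(os.path.join(project_full_path, "target"))
jar = os.path.join(project_full_path, "target", "spark-streaming-kafka-assembly-3.1.0.jar")
open(jar, "w").close()

sbt_build = glob.glob(
    os.path.join(project_full_path, "target/**/spark-streaming-kafka*.jar"), recursive=True)
maven_build = glob.glob(
    os.path.join(project_full_path, "target/spark-streaming-kafka*.jar"))

# The same jar is found by both globs; set() collapses the duplicate.
assert len(sbt_build + maven_build) == 2
assert len(set(sbt_build + maven_build)) == 1
```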