
Commit 8d3a5c5

Address comments
1 parent 44c622b commit 8d3a5c5

File tree: 1 file changed (+9, -10)


python/run-tests.py

Lines changed: 9 additions & 10 deletions
@@ -93,18 +93,17 @@ def run_individual_python_test(target_dir, test_name, pyspark_python):
         "pyspark-shell"
     ]
     env["PYSPARK_SUBMIT_ARGS"] = " ".join(spark_args)
-    str_test_name = " ".join(test_name)
-    LOGGER.info("Starting test(%s): %s", pyspark_python, str_test_name)
+    LOGGER.info("Starting test(%s): %s", pyspark_python, test_name)
     start_time = time.time()
     try:
         per_test_output = tempfile.TemporaryFile()
         retcode = subprocess.Popen(
-            (os.path.join(SPARK_HOME, "bin/pyspark"), ) + test_name,
+            [os.path.join(SPARK_HOME, "bin/pyspark")] + test_name.split(),
             stderr=per_test_output, stdout=per_test_output, env=env).wait()
         shutil.rmtree(tmp_dir, ignore_errors=True)
     except:
         LOGGER.exception(
-            "Got exception while running %s with %s", str_test_name, pyspark_python)
+            "Got exception while running %s with %s", test_name, pyspark_python)
         # Here, we use os._exit() instead of sys.exit() in order to force Python to exit even if
         # this code is invoked from a thread other than the main thread.
         os._exit(1)
@@ -126,7 +125,7 @@ def run_individual_python_test(target_dir, test_name, pyspark_python):
             LOGGER.exception("Got an exception while trying to print failed test output")
         finally:
             print_red("\nHad test failures in %s with %s; see logs." % (
-                str_test_name, pyspark_python))
+                test_name, pyspark_python))
             # Here, we use os._exit() instead of sys.exit() in order to force Python to exit even if
             # this code is invoked from a thread other than the main thread.
             os._exit(-1)
@@ -142,7 +141,7 @@ def run_individual_python_test(target_dir, test_name, pyspark_python):
                 decoded_lines))
             skipped_counts = len(skipped_tests)
             if skipped_counts > 0:
-                key = (pyspark_python, str_test_name)
+                key = (pyspark_python, test_name)
                 SKIPPED_TESTS[key] = skipped_tests
             per_test_output.close()
         except:
@@ -155,10 +154,10 @@ def run_individual_python_test(target_dir, test_name, pyspark_python):
     if skipped_counts != 0:
         LOGGER.info(
             "Finished test(%s): %s (%is) ... %s tests were skipped", pyspark_python,
-            str_test_name, duration, skipped_counts)
+            test_name, duration, skipped_counts)
     else:
         LOGGER.info(
-            "Finished test(%s): %s (%is)", pyspark_python, str_test_name, duration)
+            "Finished test(%s): %s (%is)", pyspark_python, test_name, duration)


 def get_default_python_executables():
@@ -278,10 +277,10 @@ def main():
                             priority = 0
                         else:
                             priority = 100
-                        task_queue.put((priority, (python_exec, (test_goal, ))))
+                        task_queue.put((priority, (python_exec, test_goal)))
         else:
             for test_goal in testnames_to_test:
-                task_queue.put((0, (python_exec, tuple(test_goal.split()))))
+                task_queue.put((0, (python_exec, test_goal)))

     # Create the target directory before starting tasks to avoid races.
     target_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'target'))
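In short, the change stops carrying test names around as tuples: run_individual_python_test now receives a plain space-separated string, so the str_test_name = " ".join(test_name) bookkeeping disappears and the name is split into argv entries only when the bin/pyspark command is built. The queue entries in main() shrink accordingly, from (python_exec, (test_goal, )) tuples to (python_exec, test_goal) strings, which is why the logging calls can format test_name directly. A minimal sketch of that command construction follows; the helper name build_pyspark_command and the SPARK_HOME fallback are illustrative assumptions, not part of the script.

# Minimal sketch, not the script itself: build_pyspark_command and the
# SPARK_HOME fallback below are illustrative assumptions.
import os

SPARK_HOME = os.environ.get("SPARK_HOME", ".")

def build_pyspark_command(test_name):
    # test_name stays a plain string such as "pyspark.sql.tests SQLTests" and is
    # split into argv entries only when the command is assembled, mirroring the
    # new [os.path.join(SPARK_HOME, "bin/pyspark")] + test_name.split() expression.
    return [os.path.join(SPARK_HOME, "bin/pyspark")] + test_name.split()

print(build_pyspark_command("pyspark.sql.tests SQLTests"))
# -> ['./bin/pyspark', 'pyspark.sql.tests', 'SQLTests'] when SPARK_HOME is unset

The real script then hands the resulting list to subprocess.Popen with stdout and stderr redirected to a temporary file, exactly as in the hunk at line 100 above.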
