@@ -267,13 +267,10 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)
     // We don't want the client to specify Xmx. These have to be set by their corresponding
     // memory flag --driver-memory or configuration entry spark.driver.memory
+    String driverDefaultJavaOptions = config.get(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS);
+    checkJavaOptions(driverDefaultJavaOptions);
     String driverExtraJavaOptions = config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS);
-    if (!isEmpty(driverExtraJavaOptions) && driverExtraJavaOptions.contains("Xmx")) {
-      String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
-        "java options (was %s). Use the corresponding --driver-memory or " +
-        "spark.driver.memory configuration instead.", driverExtraJavaOptions);
-      throw new IllegalArgumentException(msg);
-    }
+    checkJavaOptions(driverExtraJavaOptions);

     if (isClientMode) {
       // Figuring out where the memory value come from is a little tricky due to precedence.
@@ -289,6 +286,7 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)
       String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
         System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
       cmd.add("-Xmx" + memory);
+      addOptionString(cmd, driverDefaultJavaOptions);
       addOptionString(cmd, driverExtraJavaOptions);
       mergeEnvPathList(env, getLibPathEnvName(),
         config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
@@ -299,6 +297,15 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)
     return cmd;
   }

+  // Rejects any java-options value that tries to set the max heap (Xmx);
+  // heap size must come from --driver-memory / spark.driver.memory instead.
+  // No-op for null/empty input; throws IllegalArgumentException otherwise.
+  private void checkJavaOptions(String javaOptions) {
+    if (!isEmpty(javaOptions) && javaOptions.contains("Xmx")) {
+      String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
+        "java options (was %s). Use the corresponding --driver-memory or " +
+        "spark.driver.memory configuration instead.", javaOptions);
+      throw new IllegalArgumentException(msg);
+    }
+  }
+
   private List<String> buildPySparkShellCommand(Map<String, String> env) throws IOException {
     // For backwards compatibility, if a script is specified in
     // the pyspark command line, then run it using spark-submit.