@@ -267,13 +267,10 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)

     // We don't want the client to specify Xmx. These have to be set by their corresponding
     // memory flag --driver-memory or configuration entry spark.driver.memory
+    String driverDefaultJavaOptions = config.get(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS);
+    checkJavaOptions(driverDefaultJavaOptions);
     String driverExtraJavaOptions = config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS);
-    if (!isEmpty(driverExtraJavaOptions) && driverExtraJavaOptions.contains("Xmx")) {
-      String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
-        "java options (was %s). Use the corresponding --driver-memory or " +
-        "spark.driver.memory configuration instead.", driverExtraJavaOptions);
-      throw new IllegalArgumentException(msg);
-    }
+    checkJavaOptions(driverExtraJavaOptions);

     if (isClientMode) {
       // Figuring out where the memory value come from is a little tricky due to precedence.
@@ -289,6 +286,7 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)
       String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
         System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
       cmd.add("-Xmx" + memory);
+      addOptionString(cmd, driverDefaultJavaOptions);
       addOptionString(cmd, driverExtraJavaOptions);
       mergeEnvPathList(env, getLibPathEnvName(),
         config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
@@ -299,6 +297,15 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env)
     return cmd;
   }

+  private void checkJavaOptions(String javaOptions) {
+    if (!isEmpty(javaOptions) && javaOptions.contains("Xmx")) {
+      String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
+        "java options (was %s). Use the corresponding --driver-memory or " +
+        "spark.driver.memory configuration instead.", javaOptions);
+      throw new IllegalArgumentException(msg);
+    }
+  }
+
   private List<String> buildPySparkShellCommand(Map<String, String> env) throws IOException {
     // For backwards compatibility, if a script is specified in
     // the pyspark command line, then run it using spark-submit.
0 commit comments