@@ -5,9 +5,11 @@ assemblyJarName <- "sparkr-assembly-0.1.jar"
 sparkR.onLoad <- function(libname, pkgname) {
   assemblyJarPath <- paste(libname, "/SparkR/", assemblyJarName, sep = "")
   packageStartupMessage("[SparkR] Initializing with classpath ", assemblyJarPath, "\n")
+
+  sparkMem <- Sys.getenv("SPARK_MEM", "512m")
   .sparkREnv$libname <- libname
   .sparkREnv$assemblyJarPath <- assemblyJarPath
-  .jinit(classpath = assemblyJarPath)
+  .jinit(classpath = assemblyJarPath, parameters = paste("-Xmx", sparkMem, sep = ""))
 }
 
 #' Initialize a new Spark Context.
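With this hunk the driver JVM heap is sized from the SPARK_MEM environment variable (defaulting to 512m) at package load time; rJava only honors JVM parameters on the first .jinit() call, so the variable has to be set before SparkR is attached. A minimal usage sketch, assuming SPARK_MEM is not already exported in the shell and using 2g purely as an illustration:

    Sys.setenv(SPARK_MEM = "2g")   # must be set before the JVM starts
    library(SparkR)                # onLoad passes "-Xmx2g" to .jinit()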
@@ -17,16 +19,20 @@ sparkR.onLoad <- function(libname, pkgname) {
 #' @param master The Spark master URL.
 #' @param appName Application name to register with cluster manager
 #' @param sparkHome Spark Home directory
+#' @param sparkEnvir Named list of environment variables to set on worker nodes.
 #' @export
 #' @examples
 #'\dontrun{
-#' sparkR.init("local[2]", "SparkR", "/home/spark")
+#' sc <- sparkR.init("local[2]", "SparkR", "/home/spark")
+#' sc <- sparkR.init("local[2]", "SparkR", "/home/spark",
+#'                  list(spark.executor.memory="1g"))
 #'}
 
 sparkR.init <- function(
   master = "local",
   appName = "SparkR",
-  sparkHome = Sys.getenv("SPARK_HOME")) {
+  sparkHome = Sys.getenv("SPARK_HOME"),
+  sparkEnvir = list()) {
 
   if (exists(".sparkRjsc", envir = .sparkREnv)) {
     return(get(".sparkRjsc", envir = .sparkREnv))
@@ -36,13 +42,22 @@ sparkR.init <- function(
     sparkHome <- normalizePath(sparkHome)
   }
 
-  # TODO: support other constructors
+  hm <- .jnew("java/util/HashMap")
+  for (varname in names(sparkEnvir)) {
+    hm$put(varname, sparkEnvir[[varname]])
+  }
+
   assign(
     ".sparkRjsc",
-    .jnew("org/apache/spark/api/java/JavaSparkContext", master, appName,
-          as.character(sparkHome),
-          as.character(.sparkREnv$assemblyJarPath)),
-    envir = .sparkREnv
+    J("edu.berkeley.cs.amplab.sparkr.RRDD",
+      "createSparkContext",
+      master,
+      appName,
+      as.character(sparkHome),
+      .jarray(as.character(.sparkREnv$assemblyJarPath),
+              "java/lang/String"),
+      hm),
+    envir = .sparkREnv
   )
 
   get(".sparkRjsc", envir = .sparkREnv)
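Taken together, the two hunks let callers size the driver heap through SPARK_MEM and pass per-application settings for the workers through the new sparkEnvir argument, which is copied into a java.util.HashMap and handed to RRDD.createSparkContext. A hedged usage sketch, with illustrative (not default) property values:

    sc <- sparkR.init(master = "local[2]",
                      appName = "SparkR",
                      sparkHome = Sys.getenv("SPARK_HOME"),
                      sparkEnvir = list(spark.executor.memory = "1g",
                                        spark.cores.max = "4"))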