1818context(" test functions in sparkR.R" )
1919
2020test_that(" Check masked functions" , {
21- skip_on_cran()
22-
2321 # Check that we are not masking any new function from base, stats, testthat unexpectedly
2422 # NOTE: We should avoid adding entries to *namesOfMaskedCompletely* as masked functions make it
2523 # hard for users to use base R functions. Please check when in doubt.
@@ -57,8 +55,6 @@ test_that("Check masked functions", {
5755})
5856
5957test_that(" repeatedly starting and stopping SparkR" , {
60- skip_on_cran()
61-
6258 for (i in 1 : 4 ) {
6359 sc <- suppressWarnings(sparkR.init(master = sparkRTestMaster ))
6460 rdd <- parallelize(sc , 1 : 20 , 2L )
@@ -77,8 +73,6 @@ test_that("repeatedly starting and stopping SparkSession", {
7773})
7874
7975test_that(" rdd GC across sparkR.stop" , {
80- skip_on_cran()
81-
8276 sc <- sparkR.sparkContext(master = sparkRTestMaster ) # sc should get id 0
8377 rdd1 <- parallelize(sc , 1 : 20 , 2L ) # rdd1 should get id 1
8478 rdd2 <- parallelize(sc , 1 : 10 , 2L ) # rdd2 should get id 2
@@ -102,8 +96,6 @@ test_that("rdd GC across sparkR.stop", {
10296})
10397
10498test_that(" job group functions can be called" , {
105- skip_on_cran()
106-
10799 sc <- sparkR.sparkContext(master = sparkRTestMaster )
108100 setJobGroup(" groupId" , " job description" , TRUE )
109101 cancelJobGroup(" groupId" )
@@ -116,16 +108,12 @@ test_that("job group functions can be called", {
116108})
117109
# NOTE(review): this file is a scraped unified diff, not clean source — each
# line carries fused old/new diff line numbers (e.g. "118110", "121111"), and
# the "-" prefixed lines below are diff DELETIONS (this commit removes the
# skip_on_cran() gate so the test also runs on CRAN). Tokens are preserved
# byte-identical here; restore real source from the upstream repo before use.
#
# Test contract: a log-level utility can be called against a live context.
# Starts a SparkContext, sets the log level, then tears the session down.
118110test_that(" utility function can be called" , {
119- skip_on_cran()
120-
# sparkRTestMaster is presumably defined in the suite's shared setup — TODO confirm
121111 sparkR.sparkContext(master = sparkRTestMaster )
# Only checks the call completes without error; no expectation on output
122112 setLogLevel(" ERROR" )
# Clean up so this test leaves no running session for later tests
123113 sparkR.session.stop()
124114})
125115
126116test_that(" getClientModeSparkSubmitOpts() returns spark-submit args from whitelist" , {
127- skip_on_cran()
128-
129117 e <- new.env()
130118 e [[" spark.driver.memory" ]] <- " 512m"
131119 ops <- getClientModeSparkSubmitOpts(" sparkrmain" , e )
@@ -153,8 +141,6 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whiteli
153141})
154142
155143test_that(" sparkJars sparkPackages as comma-separated strings" , {
156- skip_on_cran()
157-
158144 expect_warning(processSparkJars(" a, b " ))
159145 jars <- suppressWarnings(processSparkJars(" a, b " ))
160146 expect_equal(lapply(jars , basename ), list (" a" , " b" ))
@@ -182,8 +168,6 @@ test_that("spark.lapply should perform simple transforms", {
182168})
183169
184170test_that(" add and get file to be downloaded with Spark job on every node" , {
185- skip_on_cran()
186-
187171 sparkR.sparkContext(master = sparkRTestMaster )
188172 # Test add file.
189173 path <- tempfile(pattern = " hello" , fileext = " .txt" )
0 commit comments