
Commit 26003de

Felix Cheung authored and committed
[SPARK-20877][SPARKR][FOLLOWUP] clean up after test move
clean up after big test move.

Tested with unit tests, jenkins.

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #18267 from felixcheung/rtestset2.

(cherry picked from commit 9f4ff95)
Signed-off-by: Felix Cheung <felixcheung@apache.org>
1 parent 0b0be47 commit 26003de

Showing 27 changed files with 32 additions and 370 deletions.

R/pkg/.Rbuildignore

Lines changed: 1 addition & 0 deletions
@@ -6,3 +6,4 @@
 ^README\.Rmd$
 ^src-native$
 ^html$
+^tests/fulltests/*
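
Worth noting: .Rbuildignore entries are Perl-compatible regular expressions matched against paths relative to the package root, so this single pattern keeps the entire moved test directory out of the CRAN-built tarball. A quick illustrative check (not part of this commit):

grepl("^tests/fulltests/*", "tests/fulltests/test_Serde.R", perl = TRUE)
# [1] TRUE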

R/pkg/R/install.R

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ hadoopVersionName <- function(hadoopVersion) {
 # The implementation refers to appdirs package: https://pypi.python.org/pypi/appdirs and
 # adapt to Spark context
 sparkCachePath <- function() {
-  if (.Platform$OS.type == "windows") {
+  if (is_windows()) {
     winAppPath <- Sys.getenv("LOCALAPPDATA", unset = NA)
     if (is.na(winAppPath)) {
       stop(paste("%LOCALAPPDATA% not found.",

R/pkg/R/utils.R

Lines changed: 2 additions & 6 deletions
@@ -900,10 +900,6 @@ isAtomicLengthOne <- function(x) {
   is.atomic(x) && length(x) == 1
 }
 
-is_cran <- function() {
-  !identical(Sys.getenv("NOT_CRAN"), "true")
-}
-
 is_windows <- function() {
   .Platform$OS.type == "windows"
 }
@@ -912,6 +908,6 @@ hadoop_home_set <- function() {
   !identical(Sys.getenv("HADOOP_HOME"), "")
 }
 
-not_cran_or_windows_with_hadoop <- function() {
-  !is_cran() && (!is_windows() || hadoop_home_set())
+windows_with_hadoop <- function() {
+  !is_windows() || hadoop_home_set()
 }
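
With the full suites excluded from the CRAN build, is_cran() has no remaining callers, and the combined guard collapses to its Windows/Hadoop half. A hypothetical usage sketch (not from this commit), assuming the helpers above, shows how the renamed guard gates filesystem-dependent work:

library(testthat)

test_that("filesystem-dependent example", {
  # Runs everywhere except on Windows machines without HADOOP_HOME set,
  # where Spark's local filesystem operations fail.
  if (windows_with_hadoop()) {
    dir <- tempfile(pattern = "spark-test")
    dir.create(dir)
    expect_true(dir.exists(dir))
    unlink(dir, recursive = TRUE)
  }
})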

R/pkg/tests/fulltests/test_Serde.R

Lines changed: 0 additions & 6 deletions
@@ -20,8 +20,6 @@ context("SerDe functionality")
 sparkSession <- sparkR.session(master = sparkRTestMaster, enableHiveSupport = FALSE)
 
 test_that("SerDe of primitive types", {
-  skip_on_cran()
-
   x <- callJStatic("SparkRHandler", "echo", 1L)
   expect_equal(x, 1L)
   expect_equal(class(x), "integer")
@@ -40,8 +38,6 @@ test_that("SerDe of primitive types", {
 })
 
 test_that("SerDe of list of primitive types", {
-  skip_on_cran()
-
   x <- list(1L, 2L, 3L)
   y <- callJStatic("SparkRHandler", "echo", x)
   expect_equal(x, y)
@@ -69,8 +65,6 @@ test_that("SerDe of list of primitive types", {
 })
 
 test_that("SerDe of list of lists", {
-  skip_on_cran()
-
   x <- list(list(1L, 2L, 3L), list(1, 2, 3),
             list(TRUE, FALSE), list("a", "b", "c"))
   y <- callJStatic("SparkRHandler", "echo", x)
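
The same mechanical removal of skip_on_cran() repeats in every test file below. For reference, testthat's skip_on_cran() skips a test unless the NOT_CRAN environment variable is "true" (exactly the check the deleted is_cran() helper mirrored); since CRAN no longer ships anything under tests/fulltests/, the per-test guard is dead code. A minimal sketch of the pattern being deleted:

library(testthat)

test_that("example of the removed guard", {
  skip_on_cran()  # no-op when Sys.getenv("NOT_CRAN") == "true", skips otherwise
  expect_equal(1L + 1L, 2L)
})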

R/pkg/tests/fulltests/test_Windows.R

Lines changed: 1 addition & 6 deletions
@@ -17,16 +17,11 @@
 context("Windows-specific tests")
 
 test_that("sparkJars tag in SparkContext", {
-  skip_on_cran()
-
-  if (.Platform$OS.type != "windows") {
+  if (!is_windows()) {
     skip("This test is only for Windows, skipped")
   }
 
   testOutput <- launchScript("ECHO", "a/b/c", wait = TRUE)
   abcPath <- testOutput[1]
   expect_equal(abcPath, "a\\b\\c")
 })
-
-message("--- End test (Windows) ", as.POSIXct(Sys.time(), tz = "GMT"))
-message("elapsed ", (proc.time() - timer_ptm)[3])

R/pkg/tests/fulltests/test_binaryFile.R

Lines changed: 0 additions & 8 deletions
@@ -24,8 +24,6 @@ sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext",
 mockFile <- c("Spark is pretty.", "Spark is awesome.")
 
 test_that("saveAsObjectFile()/objectFile() following textFile() works", {
-  skip_on_cran()
-
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   writeLines(mockFile, fileName1)
@@ -40,8 +38,6 @@ test_that("saveAsObjectFile()/objectFile() following textFile() works", {
 })
 
 test_that("saveAsObjectFile()/objectFile() works on a parallelized list", {
-  skip_on_cran()
-
   fileName <- tempfile(pattern = "spark-test", fileext = ".tmp")
 
   l <- list(1, 2, 3)
@@ -54,8 +50,6 @@ test_that("saveAsObjectFile()/objectFile() works on a parallelized list", {
 })
 
 test_that("saveAsObjectFile()/objectFile() following RDD transformations works", {
-  skip_on_cran()
-
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   writeLines(mockFile, fileName1)
@@ -80,8 +74,6 @@ test_that("saveAsObjectFile()/objectFile() following RDD transformations works",
 })
 
 test_that("saveAsObjectFile()/objectFile() works with multiple paths", {
-  skip_on_cran()
-
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")
 
R/pkg/tests/fulltests/test_binary_function.R

Lines changed: 0 additions & 6 deletions
@@ -29,8 +29,6 @@ rdd <- parallelize(sc, nums, 2L)
 mockFile <- c("Spark is pretty.", "Spark is awesome.")
 
 test_that("union on two RDDs", {
-  skip_on_cran()
-
   actual <- collectRDD(unionRDD(rdd, rdd))
   expect_equal(actual, as.list(rep(nums, 2)))
 
@@ -53,8 +51,6 @@ test_that("union on two RDDs", {
 })
 
 test_that("cogroup on two RDDs", {
-  skip_on_cran()
-
   rdd1 <- parallelize(sc, list(list(1, 1), list(2, 4)))
   rdd2 <- parallelize(sc, list(list(1, 2), list(1, 3)))
   cogroup.rdd <- cogroup(rdd1, rdd2, numPartitions = 2L)
@@ -73,8 +69,6 @@ test_that("cogroup on two RDDs", {
 })
 
 test_that("zipPartitions() on RDDs", {
-  skip_on_cran()
-
   rdd1 <- parallelize(sc, 1:2, 2L) # 1, 2
   rdd2 <- parallelize(sc, 1:4, 2L) # 1:2, 3:4
   rdd3 <- parallelize(sc, 1:6, 2L) # 1:3, 4:6

R/pkg/tests/fulltests/test_broadcast.R

Lines changed: 0 additions & 4 deletions
@@ -26,8 +26,6 @@ nums <- 1:2
 rrdd <- parallelize(sc, nums, 2L)
 
 test_that("using broadcast variable", {
-  skip_on_cran()
-
   randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))
   randomMatBr <- broadcast(sc, randomMat)
 
@@ -40,8 +38,6 @@ test_that("using broadcast variable", {
 })
 
 test_that("without using broadcast variable", {
-  skip_on_cran()
-
   randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))
 
   useBroadcast <- function(x) {

R/pkg/tests/fulltests/test_client.R

Lines changed: 0 additions & 8 deletions
@@ -18,8 +18,6 @@
 context("functions in client.R")
 
 test_that("adding spark-testing-base as a package works", {
-  skip_on_cran()
-
   args <- generateSparkSubmitArgs("", "", "", "",
                                   "holdenk:spark-testing-base:1.3.0_0.0.5")
   expect_equal(gsub("[[:space:]]", "", args),
@@ -28,22 +26,16 @@ test_that("adding spark-testing-base as a package works", {
 })
 
 test_that("no package specified doesn't add packages flag", {
-  skip_on_cran()
-
   args <- generateSparkSubmitArgs("", "", "", "", "")
   expect_equal(gsub("[[:space:]]", "", args),
                "")
 })
 
 test_that("multiple packages don't produce a warning", {
-  skip_on_cran()
-
   expect_warning(generateSparkSubmitArgs("", "", "", "", c("A", "B")), NA)
 })
 
 test_that("sparkJars sparkPackages as character vectors", {
-  skip_on_cran()
-
   args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
                                   c("com.databricks:spark-avro_2.10:2.0.1"))
   expect_match(args, "--jars one.jar,two.jar,three.jar")

R/pkg/tests/fulltests/test_context.R

Lines changed: 0 additions & 16 deletions
@@ -18,8 +18,6 @@
 context("test functions in sparkR.R")
 
 test_that("Check masked functions", {
-  skip_on_cran()
-
   # Check that we are not masking any new function from base, stats, testthat unexpectedly
   # NOTE: We should avoid adding entries to *namesOfMaskedCompletely* as masked functions make it
   # hard for users to use base R functions. Please check when in doubt.
@@ -57,8 +55,6 @@ test_that("Check masked functions", {
 })
 
 test_that("repeatedly starting and stopping SparkR", {
-  skip_on_cran()
-
   for (i in 1:4) {
     sc <- suppressWarnings(sparkR.init(master = sparkRTestMaster))
     rdd <- parallelize(sc, 1:20, 2L)
@@ -77,8 +73,6 @@ test_that("repeatedly starting and stopping SparkSession", {
 })
 
 test_that("rdd GC across sparkR.stop", {
-  skip_on_cran()
-
   sc <- sparkR.sparkContext(master = sparkRTestMaster) # sc should get id 0
   rdd1 <- parallelize(sc, 1:20, 2L) # rdd1 should get id 1
   rdd2 <- parallelize(sc, 1:10, 2L) # rdd2 should get id 2
@@ -102,8 +96,6 @@ test_that("rdd GC across sparkR.stop", {
 })
 
 test_that("job group functions can be called", {
-  skip_on_cran()
-
   sc <- sparkR.sparkContext(master = sparkRTestMaster)
   setJobGroup("groupId", "job description", TRUE)
   cancelJobGroup("groupId")
@@ -116,16 +108,12 @@ test_that("job group functions can be called", {
 })
 
 test_that("utility function can be called", {
-  skip_on_cran()
-
   sparkR.sparkContext(master = sparkRTestMaster)
   setLogLevel("ERROR")
   sparkR.session.stop()
 })
 
 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
-  skip_on_cran()
-
   e <- new.env()
   e[["spark.driver.memory"]] <- "512m"
   ops <- getClientModeSparkSubmitOpts("sparkrmain", e)
@@ -153,8 +141,6 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whiteli
 })
 
 test_that("sparkJars sparkPackages as comma-separated strings", {
-  skip_on_cran()
-
   expect_warning(processSparkJars(" a, b "))
   jars <- suppressWarnings(processSparkJars(" a, b "))
   expect_equal(lapply(jars, basename), list("a", "b"))
@@ -182,8 +168,6 @@ test_that("spark.lapply should perform simple transforms", {
 })
 
 test_that("add and get file to be downloaded with Spark job on every node", {
-  skip_on_cran()
-
   sparkR.sparkContext(master = sparkRTestMaster)
   # Test add file.
   path <- tempfile(pattern = "hello", fileext = ".txt")

0 commit comments