[SPARK-14914] Normalize Paths/URIs for windows. #12695
Changes from 14 commits
@@ -35,6 +35,7 @@
 import java.util.Set;
 import java.util.concurrent.*;

+import org.apache.spark.util.Utils;
 import scala.Tuple2;
 import scala.Tuple3;
 import scala.Tuple4;

@@ -1071,7 +1072,7 @@ public void wholeTextFiles() throws Exception {
 byte[] content1 = "spark is easy to use.\n".getBytes(StandardCharsets.UTF_8);
 byte[] content2 = "spark is also easy to use.\n".getBytes(StandardCharsets.UTF_8);

-String tempDirName = tempDir.getAbsolutePath();
+String tempDirName = Utils.normalizePath(tempDir.getAbsolutePath());
 Files.write(content1, new File(tempDirName + "/part-00000"));
 Files.write(content2, new File(tempDirName + "/part-00001"));
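The Utils.normalizePath helper this hunk introduces is not itself shown in the diff. Judging from how the test uses the result (plain string concatenation with "/part-00000"), it presumably rewrites Windows backslash separators to forward slashes; the sketch below rests on that assumption and is not the PR's actual implementation:

// Hypothetical stand-in for the Utils.normalizePath referenced above;
// the real helper added by this PR is not visible in the excerpt.
def normalizePath(path: String): String = {
  // On Windows, File.getAbsolutePath yields e.g. "C:\tmp\spark-test";
  // forward slashes keep later concatenation consistent across platforms.
  path.replace('\\', '/')
}

normalizePath("C:\\tmp\\spark-test")  // "C:/tmp/spark-test"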
@@ -61,8 +61,7 @@ class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
   pw.close()

-  // Path to tmpFile
-  tmpFilePath = "file://" + tmpFile.getAbsolutePath
+  tmpFilePath = Utils.resolveURI(tmpFile.getAbsolutePath).toString
 }

 after {
   Utils.deleteRecursively(tmpDir)

@@ -181,7 +180,7 @@ class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
 sc.textFile(tmpFilePath, 4)
   .map(key => (key, 1))
   .reduceByKey(_ + _)
-  .saveAsTextFile("file://" + tmpFile.getAbsolutePath)
+  .saveAsTextFile(Utils.resolveURI(tmpFile.getAbsolutePath).toString)

 sc.listenerBus.waitUntilEmpty(500)
 assert(inputRead == numRecords)

@@ -197,7 +196,7 @@ class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
 val numPartitions = 2
 val cartVector = 0 to 9
 val cartFile = new File(tmpDir, getClass.getSimpleName + "_cart.txt")
-val cartFilePath = "file://" + cartFile.getAbsolutePath
+val cartFilePath = Utils.resolveURI(cartFile.getAbsolutePath).toString

 // write files to disk so we can read them later.
 sc.parallelize(cartVector).saveAsTextFile(cartFilePath)

@@ -265,7 +264,7 @@ class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
 // Only supported on newer Hadoop
 if (SparkHadoopUtil.get.getFSBytesWrittenOnThreadCallback().isDefined) {
   val file = new File(tmpDir, getClass.getSimpleName)
-  val filePath = "file://" + file.getAbsolutePath
+  val filePath = Utils.resolveURI(file.getAbsolutePath).toString

   val records = runAndReturnRecordsWritten {
     sc.parallelize(1 to numRecords).saveAsTextFile(filePath)

@@ -278,7 +277,7 @@ class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
 // Only supported on newer Hadoop
 if (SparkHadoopUtil.get.getFSBytesWrittenOnThreadCallback().isDefined) {
   val file = new File(tmpDir, getClass.getSimpleName)
-  val filePath = "file://" + file.getAbsolutePath
+  val filePath = Utils.resolveURI(file.getAbsolutePath).toString

   val records = runAndReturnRecordsWritten {
     sc.parallelize(1 to numRecords).map(key => (key.toString, key.toString))
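The recurring change in this file swaps "file://" + getAbsolutePath for Utils.resolveURI(...).toString. On Windows, getAbsolutePath returns a string like C:\Users\me\data.txt, so plain concatenation produces the malformed URI file://C:\Users\me\data.txt, while Utils.resolveURI builds the URI through java.io.File, which handles drive letters and backslashes. A small illustration (the path and the exact outputs assume a Windows JVM):

import java.io.File
import org.apache.spark.util.Utils

val file = new File("C:\\Users\\me\\data.txt")

// Naive concatenation: "file://C:\Users\me\data.txt" -- not a valid URI.
val broken = "file://" + file.getAbsolutePath

// resolveURI round-trips through File.toURI: "file:/C:/Users/me/data.txt".
val fixed = Utils.resolveURI(file.getAbsolutePath).toString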
@@ -17,6 +17,7 @@
 package org.apache.spark.scheduler.cluster.mesos

+import java.io.File
 import java.nio.ByteBuffer
 import java.util.Arrays
 import java.util.Collection

@@ -25,20 +26,18 @@ import java.util.Collections
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-
 import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
 import org.apache.mesos.Protos._
 import org.apache.mesos.Protos.Value.Scalar
 import org.mockito.{ArgumentCaptor, Matchers}
 import org.mockito.Matchers._
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
-
 import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.executor.MesosExecutorBackend
-import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded,
-  TaskDescription, TaskSchedulerImpl, WorkerOffer}
+import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded, TaskDescription, TaskSchedulerImpl, WorkerOffer}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
+import org.apache.spark.util.Utils

 class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
Member
Please do not remove newlines between imports. (See databricks/scala-style-guide#imports)
@@ -111,7 +110,8 @@ class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar
 test("check spark-class location correctly") {
   val conf = new SparkConf
-  conf.set("spark.mesos.executor.home", "/mesos-home")
+  conf.set("spark.mesos.executor.home",
+    if (Utils.isWindows) "D:\\mesos-home" else "/mesos-home")

   val listenerBus = mock[LiveListenerBus]
   listenerBus.post(
@@ -134,9 +134,15 @@ class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar
     mesosSchedulerBackend.createResource("mem", 1024))
   // uri is null.
   val (executorInfo, _) = mesosSchedulerBackend.createExecutorInfo(resources, "test-id")
+  val expectedSparkClass =
+    if (Utils.isWindows) {
+      "D:\\mesos-home\\bin\\spark-class"
+    } else {
+      "/mesos-home/bin/spark-class"
+    }
   assert(executorInfo.getCommand.getValue ===
-    s" /mesos-home/bin/spark-class ${classOf[MesosExecutorBackend].getName}")
+    " " + new File(s"$expectedSparkClass ${classOf[MesosExecutorBackend].getName}")
+      .getAbsolutePath)
   // uri exists.
   conf.set("spark.executor.uri", "hdfs:///test-app-1.0.0.tgz")
   val (executorInfo1, _) = mesosSchedulerBackend.createExecutorInfo(resources, "test-id")
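Passing the expected command through new File(...).getAbsolutePath appears intended to make the assertion separator-agnostic: java.io.File normalizes whatever separator style it is given to the platform's own form. A sketch of that effect (values assume a Windows JVM and are illustrative only; the embedded space in the tested string makes the whole command line one unusual file name, which is harmless here because both sides of the assertion undergo the same transformation):

import java.io.File

// On Windows, File rewrites "/" to "\", so either spelling of the expected
// path compares equal after getAbsolutePath.
val a = new File("D:\\mesos-home\\bin\\spark-class").getAbsolutePath
val b = new File("D:/mesos-home/bin/spark-class").getAbsolutePath
assert(a == b)  // both are "D:\mesos-home\bin\spark-class"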
@@ -416,9 +416,9 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
   assertResolves("hdfs:/jar1,file:/jar2,jar3,jar4#jar5,path to/jar6",
     s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:$cwd/jar4#jar5,file:$cwd/path%20to/jar6")
   if (Utils.isWindows) {
     // It seems "#" is not valid for a local file path, and it will be escaped by the File.toURI
     assertResolves("""hdfs:/jar1,file:/jar2,jar3,C:\pi.py#py.pi,C:\path to\jar4""",
-      s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:/C:/pi.py#py.pi,file:/C:/path%20to/jar4")
+      s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:/C:/pi.py%23py.pi,file:/C:/path%20to/jar4")
   }

 test("nonLocalPaths") {
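The updated expectation follows from how java.io.File.toURI treats characters that are reserved in URIs: "#" begins a URI fragment, so a literal "#" inside a local file name must be percent-encoded as %23. A quick check (the drive-letter output assumes a Windows JVM):

import java.io.File

// "#" would start a URI fragment, so File.toURI escapes it in the path.
val uri = new File("C:\\pi.py#py.pi").toURI.toString
// On Windows: "file:/C:/pi.py%23py.pi"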
Maybe
val uri = Utils.resolveURI(path)