Skip to content

Commit 196ba9e

Browse files
author
Marcelo Vanzin
committed
Merge branch 'master' into SPARK-10439
Conflicts: sql/catalyst/src/test/scala/org/apache/spark/sql/RandomDataGenerator.scala
2 parents ffe39e4 + 52b24a6 commit 196ba9e

159 files changed

Lines changed: 2141 additions & 5075 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Apache Spark
22

33
Spark is a fast and general cluster computing system for Big Data. It provides
4-
high-level APIs in Scala, Java, and Python, and an optimized engine that
4+
high-level APIs in Scala, Java, Python, and R, and an optimized engine that
55
supports general computation graphs for data analysis. It also supports a
66
rich set of higher-level tools including Spark SQL for SQL and DataFrames,
77
MLlib for machine learning, GraphX for graph processing,
@@ -94,5 +94,5 @@ distribution.
9494

9595
## Configuration
9696

97-
Please refer to the [Configuration guide](http://spark.apache.org/docs/latest/configuration.html)
97+
Please refer to the [Configuration Guide](http://spark.apache.org/docs/latest/configuration.html)
9898
in the online documentation for an overview on how to configure Spark.

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@ import org.apache.spark.broadcast.BroadcastManager
3333
import org.apache.spark.metrics.MetricsSystem
3434
import org.apache.spark.network.BlockTransferService
3535
import org.apache.spark.network.netty.NettyBlockTransferService
36-
import org.apache.spark.network.nio.NioBlockTransferService
3736
import org.apache.spark.rpc.{RpcEndpointRef, RpcEndpoint, RpcEnv}
3837
import org.apache.spark.rpc.akka.AkkaRpcEnv
3938
import org.apache.spark.scheduler.{OutputCommitCoordinator, LiveListenerBus}
@@ -326,15 +325,7 @@ object SparkEnv extends Logging {
326325

327326
val shuffleMemoryManager = ShuffleMemoryManager.create(conf, numUsableCores)
328327

329-
val blockTransferService =
330-
conf.get("spark.shuffle.blockTransferService", "netty").toLowerCase match {
331-
case "netty" =>
332-
new NettyBlockTransferService(conf, securityManager, numUsableCores)
333-
case "nio" =>
334-
logWarning("NIO-based block transfer service is deprecated, " +
335-
"and will be removed in Spark 1.6.0.")
336-
new NioBlockTransferService(conf, securityManager)
337-
}
328+
val blockTransferService = new NettyBlockTransferService(conf, securityManager, numUsableCores)
338329

339330
val blockManagerMaster = new BlockManagerMaster(registerOrLookupEndpoint(
340331
BlockManagerMaster.DRIVER_ENDPOINT_NAME,

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -319,9 +319,6 @@ object SparkSubmit {
319319

320320
// The following modes are not supported or applicable
321321
(clusterManager, deployMode) match {
322-
case (MESOS, CLUSTER) if args.isPython =>
323-
printErrorAndExit("Cluster deploy mode is currently not supported for python " +
324-
"applications on Mesos clusters.")
325322
case (MESOS, CLUSTER) if args.isR =>
326323
printErrorAndExit("Cluster deploy mode is currently not supported for R " +
327324
"applications on Mesos clusters.")
@@ -554,7 +551,15 @@ object SparkSubmit {
554551
if (isMesosCluster) {
555552
assert(args.useRest, "Mesos cluster mode is only supported through the REST submission API")
556553
childMainClass = "org.apache.spark.deploy.rest.RestSubmissionClient"
557-
childArgs += (args.primaryResource, args.mainClass)
554+
if (args.isPython) {
555+
// Second argument is main class
556+
childArgs += (args.primaryResource, "")
557+
if (args.pyFiles != null) {
558+
sysProps("spark.submit.pyFiles") = args.pyFiles
559+
}
560+
} else {
561+
childArgs += (args.primaryResource, args.mainClass)
562+
}
558563
if (args.childArgs != null) {
559564
childArgs ++= args.childArgs
560565
}

core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,11 @@ class NettyBlockTransferService(conf: SparkConf, securityManager: SecurityManage
149149
}
150150

151151
override def close(): Unit = {
152-
server.close()
153-
clientFactory.close()
152+
if (server != null) {
153+
server.close()
154+
}
155+
if (clientFactory != null) {
156+
clientFactory.close()
157+
}
154158
}
155159
}

core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala

Lines changed: 0 additions & 175 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/network/nio/BlockMessageArray.scala

Lines changed: 0 additions & 140 deletions
This file was deleted.

0 commit comments

Comments (0)