Skip to content

Commit cf70b07

Browse files
committed
Merge remote-tracking branch 'upstream/master' into mllib-stats-api-check
2 parents 0b7cec3 + fbad722 commit cf70b07

223 files changed

Lines changed: 5103 additions & 2018 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.rat-excludes

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ log4j-defaults.properties
2525
bootstrap-tooltip.js
2626
jquery-1.11.1.min.js
2727
sorttable.js
28+
.*avsc
2829
.*txt
2930
.*json
3031
.*data

README.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,15 @@ If your project is built with Maven, add this to your POM file's `<dependencies>
115115
</dependency>
116116

117117

118+
## A Note About Thrift JDBC server and CLI for Spark SQL
119+
120+
Spark SQL supports Thrift JDBC server and CLI.
121+
See sql-programming-guide.md for more information about those features.
122+
You can use those features by setting `-Phive-thriftserver` when building Spark as follows.
123+
124+
$ sbt/sbt -Phive-thriftserver assembly
125+
126+
118127
## Configuration
119128

120129
Please refer to the [Configuration guide](http://spark.apache.org/docs/latest/configuration.html)

bin/spark-shell.cmd

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,4 @@ rem
1919

2020
set SPARK_HOME=%~dp0..
2121

22-
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-shell --class org.apache.spark.repl.Main %*
22+
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell

bin/spark-sql

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -65,30 +65,30 @@ while (($#)); do
6565
case $1 in
6666
-d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
6767
ensure_arg_number $# 2
68-
CLI_ARGS+=($1); shift
69-
CLI_ARGS+=($1); shift
68+
CLI_ARGS+=("$1"); shift
69+
CLI_ARGS+=("$1"); shift
7070
;;
7171

7272
-e)
7373
ensure_arg_number $# 2
74-
CLI_ARGS+=($1); shift
75-
CLI_ARGS+=(\"$1\"); shift
74+
CLI_ARGS+=("$1"); shift
75+
CLI_ARGS+=("$1"); shift
7676
;;
7777

7878
-s | --silent)
79-
CLI_ARGS+=($1); shift
79+
CLI_ARGS+=("$1"); shift
8080
;;
8181

8282
-v | --verbose)
8383
# Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
84-
CLI_ARGS+=($1)
85-
SUBMISSION_ARGS+=($1); shift
84+
CLI_ARGS+=("$1")
85+
SUBMISSION_ARGS+=("$1"); shift
8686
;;
8787

8888
*)
89-
SUBMISSION_ARGS+=($1); shift
89+
SUBMISSION_ARGS+=("$1"); shift
9090
;;
9191
esac
9292
done
9393

94-
eval exec "$FWDIR"/bin/spark-submit --class $CLASS ${SUBMISSION_ARGS[*]} spark-internal ${CLI_ARGS[*]}
94+
exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"

core/src/main/java/org/apache/spark/network/netty/FileClient.java

Lines changed: 0 additions & 100 deletions
This file was deleted.

core/src/main/java/org/apache/spark/network/netty/FileServer.java

Lines changed: 0 additions & 111 deletions
This file was deleted.

core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java

Lines changed: 0 additions & 83 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/ContextCleaner.scala

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -66,10 +66,15 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
6666

6767
/**
6868
* Whether the cleaning thread will block on cleanup tasks.
69-
* This is set to true only for tests.
69+
*
70+
* Due to SPARK-3015, this is set to true by default. This is intended to be only a temporary
71+
* workaround for the issue, which is ultimately caused by the way the BlockManager actors
72+
* issue inter-dependent blocking Akka messages to each other at high frequencies. This happens,
73+
* for instance, when the driver performs a GC and cleans up all broadcast blocks that are no
74+
* longer in scope.
7075
*/
7176
private val blockOnCleanupTasks = sc.conf.getBoolean(
72-
"spark.cleaner.referenceTracking.blocking", false)
77+
"spark.cleaner.referenceTracking.blocking", true)
7378

7479
@volatile private var stopped = false
7580

@@ -174,9 +179,6 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
174179
private def blockManagerMaster = sc.env.blockManager.master
175180
private def broadcastManager = sc.env.broadcastManager
176181
private def mapOutputTrackerMaster = sc.env.mapOutputTracker.asInstanceOf[MapOutputTrackerMaster]
177-
178-
// Used for testing. These methods explicitly blocks until cleanup is completed
179-
// to ensure that more reliable testing.
180182
}
181183

182184
private object ContextCleaner {

0 commit comments

Comments
 (0)