Skip to content

Commit 2ee75bc

Browse files
committed
Do not truncate fields
1 parent 66351a0 commit 2ee75bc

2 files changed

Lines changed: 7 additions & 1 deletion

File tree

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,10 +99,11 @@ private[spark] object Utils extends Logging {
9999
* by setting the 'spark.debug.maxToStringFields' conf in SparkEnv.
100100
*/
101101
val DEFAULT_MAX_TO_STRING_FIELDS = 25
102+
val MAX_TO_STRING_FIELDS = "spark.debug.maxToStringFields"
102103

103104
private[spark] def maxNumToStringFields = {
104105
if (SparkEnv.get != null) {
105-
SparkEnv.get.conf.getInt("spark.debug.maxToStringFields", DEFAULT_MAX_TO_STRING_FIELDS)
106+
SparkEnv.get.conf.getInt(MAX_TO_STRING_FIELDS, DEFAULT_MAX_TO_STRING_FIELDS)
106107
} else {
107108
DEFAULT_MAX_TO_STRING_FIELDS
108109
}

sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import java.sql.{Date, Timestamp}
2222

2323
import org.apache.hadoop.fs.{FileSystem, Path}
2424

25+
import org.apache.spark.SparkEnv
2526
import org.apache.spark.rdd.RDD
2627
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
2728
import org.apache.spark.sql.catalyst.InternalRow
@@ -260,7 +261,10 @@ class QueryExecution(val sparkSession: SparkSession, val logical: LogicalPlan) {
260261
val filePath = new Path(path)
261262
val fs = FileSystem.get(filePath.toUri, sparkSession.sparkContext.hadoopConfiguration)
262263
val dos = fs.create(filePath)
264+
val maxFields = SparkEnv.get.conf.getInt(Utils.MAX_TO_STRING_FIELDS,
265+
Utils.DEFAULT_MAX_TO_STRING_FIELDS)
263266
try {
267+
SparkEnv.get.conf.set(Utils.MAX_TO_STRING_FIELDS, Int.MaxValue.toString)
264268
dos.writeBytes("== Parsed Logical Plan ==\n")
265269
logical.treeString(dos, verbose = true, addSuffix = false)
266270
dos.writeBytes("== Analyzed Logical Plan ==\n")
@@ -276,6 +280,7 @@ class QueryExecution(val sparkSession: SparkSession, val logical: LogicalPlan) {
276280
org.apache.spark.sql.execution.debug.codegenToOutputStream(dos, executedPlan)
277281
} finally {
278282
dos.close()
283+
SparkEnv.get.conf.set(Utils.MAX_TO_STRING_FIELDS, maxFields.toString)
279284
}
280285
}
281286
}

0 commit comments

Comments (0)